Skip to content

Commit 79e9b57

Browse files
committed
OpenRouter supported model parameters
1 parent 48ca890 commit 79e9b57

File tree

8 files changed

+119
-13
lines changed

8 files changed

+119
-13
lines changed

src/api/providers/fetchers/__tests__/fixtures/openrouter-model-endpoints.json

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -6,16 +6,16 @@
66
"body": "",
77
"status": 200,
88
"response": [
9-
"31441d002056aa5ad5de6cfba09eb44cd983cf558aa50307224fd48d88f0c0d12137eda7bef1c435891ecc325645bf9d4794cd227137c069a7450a3f6ea3541aeacce9727170159a489e4b07a179ae738dc1a983bd860cb018631c277e3ab29720d5dea2ad528e551ef3c67c0e83e03cc3e22da9c6d2dbbb03ed2d5afa96237dbbe0d4e5e379806d0ef657edc161db2c0d863cfc7525951860c1af95425fdef6f1e177a1a24eb98a9b4ab75cb9acf4e63df938f044074a6c06dac44cda2750e3aa6e1246437d1cde032d10d0fceac4d20b07958df4a4aeec4affaa012d9b3eb5d0e3c33fdd4ad849181f1ffe53efd2b0f7f70b17431cdc7a92309228d5154e736588069b1ce7714bce6952e85c744b1cb672c175e424fda500d2300b1b3041bffe4209e02917760c1a225f6c218da952e14c3eaba01868e2fc07a68969cda1df7a9777e56ff7021bc945ab34b99e29c5222ab6214868114c9f3ebfc91c1c358cbac63aba3c18cabc99b8570923ed7b493445434205c506e4261983e7a03ac145e5e4177400cabf2a713a933092e58c0b18a4ecdf48b9d73933ec3534ee38c815670864c1a091d593757a991836ccd364e0e3e026d14b58285fe813f16ee4eaa5f285b20969d68ece56b8c01e61f98b7837320c3632314e0ce2acf4b627b7061c86ca07350aecd135c00ba71b0a08efaa5e567b2d0cbc9adc95fbb8146c53ef1fb6072b8394a59730c25e23e5e893c2a25ed4755dd70db7e0d3c42101aeda3430c89cb7df048b5a2990a64ddbac6070ceebeefc16f4f805e51cdcd44502b278439ab5eb5dbfe52eb31b84c8552f1b9aaaf32ccab7a459896918a4f4096b035bdf1a6cccc99db59ac1e0d7ec82ca95d307726386bbe8b4243aff7b14d855db2e5b0ad032c82ac88aecad09dd4eab813d6282a8dd0d947de2ecb0656ea03175e91d885361ba221b03605034261814e6c1c060c0125d58114a23c9334aa543079846052706459dce45f590e0f827bf794f3f751e24c224c06e3106cccf5c5dea93db5b0303"
9+
"3103003c0040ac54b5aabdd9f6413d6999b2079fab144b070e449ea81b11e181a3436eda4f7de3896b123d9855af92fe771e51368bc4dd00a79d1629fcb88d5269a8b34da52e1d3a150aeaa3e6747848d4eb5c6370ea60af21032cc618c7899f8eec2548b5b768ab9463758e79633e8741709e61f196556be9eddd81f6162d7dcb91be5df0d1e5e379806d0ef657edc161db2c0d8673f8eb4a2a31c0425e27857e79dbc787df858a3ae52a6e2add72e5b2d29bf7e4e3c0136d28b119681333599f408dabba49180df571f80ef44040f3ab134b2f1c5436d293bab22bfdab06b46cfad4428f0fff742b6127617c7cf84fbd4bc3dedf2f5c0c7170eb49c248a2545738cd95211a6c729cc72d2d4e9342e7a25be2b0d50bae2327ed2f15908559d98009fa0d174a004fb9f063d010f9620b694c950a675ab30a8a8126ce7f609a98d61cfaad777957feed5e6023b9689526d733a55844c53704292d82e9d3e7379983a38651d75847970743993713f72a61a4bc974453342454506c406e9231780e6a135c544e71411ba0fcab12a7330923794e58c0e0c8fe8d947b9d33c35e43e38e835c718640160c1a593d59a7978961c33c4d063e0e6e12bd8485f2853e11ef46ae5e2a5fa8189f66edf86c850bec11c85fc4bb9161b0918971621067e5b73db935e030543ea851608fae015e38dd50407857f5b2da6b61b0aac95db98f4bc136f5fe617bb03248597a0923ec35528e3e292cea455dd775dd48f33e4d1c1220d5a68141e673fb4db0a88d0279dab4cd0a06e7bceef66f418f5f50cec14d5d2420c41bd2ac5deffa2d7799dda044ae7ad9f0549d6751bea5cd4a4de320a54f581bd8fad620312777deb28381ba31812c519d7156be80c1afd905952b6725b4b819eea2b39de850e15160576dbb6c39407f21545014c9b8095dfed5c21e0941dcee06ca3e7176883337d098baf40dc4a9305de98d0130281a130c0af3e076308c892ed808e591e619bbb2a18351a3821383a24ee7e2909090f927bf794f3f751e248227c06f21612438a90a3196efcf5c5d9a93db5b0303"
1010
],
1111
"rawHeaders": {
1212
"access-control-allow-origin": "*",
1313
"cache-control": "s-maxage=300, stale-while-revalidate=600",
14-
"cf-ray": "93ed496b8e0a0fb1-LAX",
14+
"cf-ray": "943f41a4ed11a1a2-LAX",
1515
"connection": "close",
1616
"content-encoding": "br",
1717
"content-type": "application/json",
18-
"date": "Mon, 12 May 2025 22:17:32 GMT",
18+
"date": "Thu, 22 May 2025 21:02:46 GMT",
1919
"server": "cloudflare",
2020
"transfer-encoding": "chunked",
2121
"vary": "Accept-Encoding"

src/api/providers/fetchers/__tests__/fixtures/openrouter-models.json

Lines changed: 3 additions & 3 deletions
Large diffs are not rendered by default.

src/api/providers/fetchers/__tests__/openrouter.test.ts

Lines changed: 75 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
1-
// npx jest src/api/providers/fetchers/__tests__/openrouter.test.ts
1+
// npx jest api/providers/fetchers/__tests__/fetchers.test.ts
22

3-
import path from "path"
3+
import * as path from "path"
44

55
import { back as nockBack } from "nock"
66

@@ -39,6 +39,77 @@ describe.skip("OpenRouter API", () => {
3939
"anthropic/claude-3.7-sonnet:thinking",
4040
])
4141

42+
expect(
43+
Object.entries(models)
44+
.filter(([_, model]) => model.thinking)
45+
.map(([id, _]) => id)
46+
.sort(),
47+
).toEqual(["anthropic/claude-3.7-sonnet:thinking"])
48+
49+
expect(
50+
Object.entries(models)
51+
.filter(([_, model]) => model.supportedParameters?.includes("reasoning"))
52+
.map(([id, _]) => id)
53+
.sort(),
54+
).toEqual([
55+
"agentica-org/deepcoder-14b-preview:free",
56+
"aion-labs/aion-1.0",
57+
"aion-labs/aion-1.0-mini",
58+
"anthropic/claude-3.7-sonnet",
59+
"anthropic/claude-3.7-sonnet:beta",
60+
"anthropic/claude-3.7-sonnet:thinking",
61+
"anthropic/claude-opus-4",
62+
"anthropic/claude-sonnet-4",
63+
"arliai/qwq-32b-arliai-rpr-v1:free",
64+
"cognitivecomputations/dolphin3.0-r1-mistral-24b:free",
65+
"deepseek/deepseek-r1",
66+
"deepseek/deepseek-r1-distill-llama-70b",
67+
"deepseek/deepseek-r1-distill-llama-70b:free",
68+
"deepseek/deepseek-r1-distill-llama-8b",
69+
"deepseek/deepseek-r1-distill-qwen-1.5b",
70+
"deepseek/deepseek-r1-distill-qwen-14b",
71+
"deepseek/deepseek-r1-distill-qwen-14b:free",
72+
"deepseek/deepseek-r1-distill-qwen-32b",
73+
"deepseek/deepseek-r1-distill-qwen-32b:free",
74+
"deepseek/deepseek-r1-zero:free",
75+
"deepseek/deepseek-r1:free",
76+
"google/gemini-2.5-flash-preview-05-20",
77+
"google/gemini-2.5-flash-preview-05-20:thinking",
78+
"microsoft/mai-ds-r1:free",
79+
"microsoft/phi-4-reasoning-plus",
80+
"microsoft/phi-4-reasoning-plus:free",
81+
"microsoft/phi-4-reasoning:free",
82+
"moonshotai/kimi-vl-a3b-thinking:free",
83+
"nousresearch/deephermes-3-mistral-24b-preview:free",
84+
"open-r1/olympiccoder-32b:free",
85+
"openai/codex-mini",
86+
"openai/o1-pro",
87+
"perplexity/r1-1776",
88+
"perplexity/sonar-deep-research",
89+
"perplexity/sonar-reasoning",
90+
"perplexity/sonar-reasoning-pro",
91+
"qwen/qwen3-14b",
92+
"qwen/qwen3-14b:free",
93+
"qwen/qwen3-235b-a22b",
94+
"qwen/qwen3-235b-a22b:free",
95+
"qwen/qwen3-30b-a3b",
96+
"qwen/qwen3-30b-a3b:free",
97+
"qwen/qwen3-32b",
98+
"qwen/qwen3-32b:free",
99+
"qwen/qwen3-4b:free",
100+
"qwen/qwen3-8b",
101+
"qwen/qwen3-8b:free",
102+
"qwen/qwq-32b",
103+
"qwen/qwq-32b:free",
104+
"rekaai/reka-flash-3:free",
105+
"thudm/glm-z1-32b",
106+
"thudm/glm-z1-32b:free",
107+
"thudm/glm-z1-9b:free",
108+
"thudm/glm-z1-rumination-32b",
109+
"tngtech/deepseek-r1t-chimera:free",
110+
"x-ai/grok-3-mini-beta",
111+
])
112+
42113
expect(models["anthropic/claude-3.7-sonnet"]).toEqual({
43114
maxTokens: 8192,
44115
contextWindow: 200000,
@@ -51,6 +122,7 @@ describe.skip("OpenRouter API", () => {
51122
description: expect.any(String),
52123
thinking: false,
53124
supportsComputerUse: true,
125+
supportedParameters: ["max_tokens", "temperature", "reasoning", "include_reasoning"],
54126
})
55127

56128
expect(models["anthropic/claude-3.7-sonnet:thinking"]).toEqual({
@@ -65,6 +137,7 @@ describe.skip("OpenRouter API", () => {
65137
description: expect.any(String),
66138
thinking: true,
67139
supportsComputerUse: true,
140+
supportedParameters: ["max_tokens", "temperature", "reasoning", "include_reasoning"],
68141
})
69142

70143
const anthropicModels = Object.entries(models)

src/api/providers/fetchers/openrouter.ts

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import axios from "axios"
22
import { z } from "zod"
33

4+
import { isModelParameter } from "../../../schemas"
45
import { ApiHandlerOptions, ModelInfo, anthropicModels, COMPUTER_USE_MODELS } from "../../../shared/api"
56
import { parseApiPrice } from "../../../utils/cost"
67

@@ -38,6 +39,7 @@ export const openRouterModelSchema = modelRouterBaseModelSchema.extend({
3839
id: z.string(),
3940
architecture: openRouterArchitectureSchema.optional(),
4041
top_provider: z.object({ max_completion_tokens: z.number().nullish() }).optional(),
42+
supported_parameters: z.array(z.string()).optional(),
4143
})
4244

4345
export type OpenRouterModel = z.infer<typeof openRouterModelSchema>
@@ -72,6 +74,7 @@ const openRouterModelEndpointsResponseSchema = z.object({
7274
name: z.string(),
7375
description: z.string().optional(),
7476
architecture: openRouterArchitectureSchema.optional(),
77+
supported_parameters: z.array(z.string()).optional(),
7578
endpoints: z.array(openRouterModelEndpointSchema),
7679
}),
7780
})
@@ -96,13 +99,14 @@ export async function getOpenRouterModels(options?: ApiHandlerOptions): Promise<
9699
}
97100

98101
for (const model of data) {
99-
const { id, architecture, top_provider } = model
102+
const { id, architecture, top_provider, supported_parameters = [] } = model
100103

101104
models[id] = parseOpenRouterModel({
102105
id,
103106
model,
104107
modality: architecture?.modality,
105108
maxTokens: id.startsWith("anthropic/") ? top_provider?.max_completion_tokens : 0,
109+
supportedParameters: supported_parameters,
106110
})
107111
}
108112
} catch (error) {
@@ -162,11 +166,13 @@ export const parseOpenRouterModel = ({
162166
model,
163167
modality,
164168
maxTokens,
169+
supportedParameters,
165170
}: {
166171
id: string
167172
model: OpenRouterBaseModel
168173
modality: string | null | undefined
169174
maxTokens: number | null | undefined
175+
supportedParameters?: string[]
170176
}): ModelInfo => {
171177
const cacheWritesPrice = model.pricing?.input_cache_write
172178
? parseApiPrice(model.pricing?.input_cache_write)
@@ -187,6 +193,7 @@ export const parseOpenRouterModel = ({
187193
cacheReadsPrice,
188194
description: model.description,
189195
thinking: id === "anthropic/claude-3.7-sonnet:thinking",
196+
supportedParameters: supportedParameters ? supportedParameters.filter(isModelParameter) : undefined,
190197
}
191198

192199
// The OpenRouter model definition doesn't give us any hints about

src/exports/roo-code.d.ts

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -275,6 +275,7 @@ type ProviderSettings = {
275275
cacheWritesPrice?: number | undefined
276276
cacheReadsPrice?: number | undefined
277277
description?: string | undefined
278+
supportedParameters?: ("max_tokens" | "temperature" | "reasoning" | "include_reasoning")[] | undefined
278279
reasoningEffort?: ("low" | "medium" | "high") | undefined
279280
thinking?: boolean | undefined
280281
minTokensPerCachePoint?: number | undefined
@@ -678,6 +679,9 @@ type IpcMessage =
678679
cacheWritesPrice?: number | undefined
679680
cacheReadsPrice?: number | undefined
680681
description?: string | undefined
682+
supportedParameters?:
683+
| ("max_tokens" | "temperature" | "reasoning" | "include_reasoning")[]
684+
| undefined
681685
reasoningEffort?: ("low" | "medium" | "high") | undefined
682686
thinking?: boolean | undefined
683687
minTokensPerCachePoint?: number | undefined
@@ -1153,6 +1157,9 @@ type TaskCommand =
11531157
cacheWritesPrice?: number | undefined
11541158
cacheReadsPrice?: number | undefined
11551159
description?: string | undefined
1160+
supportedParameters?:
1161+
| ("max_tokens" | "temperature" | "reasoning" | "include_reasoning")[]
1162+
| undefined
11561163
reasoningEffort?: ("low" | "medium" | "high") | undefined
11571164
thinking?: boolean | undefined
11581165
minTokensPerCachePoint?: number | undefined

src/exports/types.ts

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -279,6 +279,7 @@ type ProviderSettings = {
279279
cacheWritesPrice?: number | undefined
280280
cacheReadsPrice?: number | undefined
281281
description?: string | undefined
282+
supportedParameters?: ("max_tokens" | "temperature" | "reasoning" | "include_reasoning")[] | undefined
282283
reasoningEffort?: ("low" | "medium" | "high") | undefined
283284
thinking?: boolean | undefined
284285
minTokensPerCachePoint?: number | undefined
@@ -692,6 +693,9 @@ type IpcMessage =
692693
cacheWritesPrice?: number | undefined
693694
cacheReadsPrice?: number | undefined
694695
description?: string | undefined
696+
supportedParameters?:
697+
| ("max_tokens" | "temperature" | "reasoning" | "include_reasoning")[]
698+
| undefined
695699
reasoningEffort?: ("low" | "medium" | "high") | undefined
696700
thinking?: boolean | undefined
697701
minTokensPerCachePoint?: number | undefined
@@ -1169,6 +1173,9 @@ type TaskCommand =
11691173
cacheWritesPrice?: number | undefined
11701174
cacheReadsPrice?: number | undefined
11711175
description?: string | undefined
1176+
supportedParameters?:
1177+
| ("max_tokens" | "temperature" | "reasoning" | "include_reasoning")[]
1178+
| undefined
11721179
reasoningEffort?: ("low" | "medium" | "high") | undefined
11731180
thinking?: boolean | undefined
11741181
minTokensPerCachePoint?: number | undefined

src/schemas/index.ts

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -171,6 +171,19 @@ export const reasoningEffortsSchema = z.enum(reasoningEfforts)
171171

172172
export type ReasoningEffort = z.infer<typeof reasoningEffortsSchema>
173173

174+
/**
175+
* ModelParameter
176+
*/
177+
178+
export const modelParameters = ["max_tokens", "temperature", "reasoning", "include_reasoning"] as const
179+
180+
export const modelParametersSchema = z.enum(modelParameters)
181+
182+
export type ModelParameter = z.infer<typeof modelParametersSchema>
183+
184+
export const isModelParameter = (value: string): value is ModelParameter =>
185+
modelParameters.includes(value as ModelParameter)
186+
174187
/**
175188
* ModelInfo
176189
*/
@@ -187,6 +200,7 @@ export const modelInfoSchema = z.object({
187200
cacheWritesPrice: z.number().optional(),
188201
cacheReadsPrice: z.number().optional(),
189202
description: z.string().optional(),
203+
supportedParameters: z.array(modelParametersSchema).optional(),
190204
reasoningEffort: reasoningEffortsSchema.optional(),
191205
thinking: z.boolean().optional(),
192206
minTokensPerCachePoint: z.number().optional(),

src/shared/api.ts

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1921,10 +1921,8 @@ export const PROMPT_CACHING_MODELS = new Set([
19211921
"anthropic/claude-3.7-sonnet",
19221922
"anthropic/claude-3.7-sonnet:beta",
19231923
"anthropic/claude-3.7-sonnet:thinking",
1924-
"anthropic/claude-sonnet-4-20250514",
1925-
"anthropic/claude-sonnet-4-20250514:thinking",
1926-
"anthropic/claude-opus-4-20250514",
1927-
"anthropic/claude-opus-4-20250514:thinking",
1924+
"anthropic/claude-sonnet-4",
1925+
"anthropic/claude-opus-4",
19281926
"google/gemini-2.5-pro-preview",
19291927
"google/gemini-2.5-flash-preview",
19301928
"google/gemini-2.5-flash-preview:thinking",

0 commit comments

Comments (0)