Skip to content

Commit a0c2d0d

Browse files
committed
fix(openrouter): conditionally include max_tokens parameter
- Set default `maxTokens` to 0 in `getOpenRouterModels`
- Only include `max_tokens` in API requests when value > 0
- Preserves OpenRouter defaults when no override exists
1 parent b57e148 commit a0c2d0d

File tree

2 files changed: +2 −2 lines changed

src/api/providers/fetchers/openrouter.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
```diff
@@ -72,7 +72,7 @@ export async function getOpenRouterModels(options?: ApiHandlerOptions): Promise<
 		typeof cacheWritesPrice !== "undefined" && typeof cacheReadsPrice !== "undefined"

 	const modelInfo: ModelInfo = {
-		maxTokens: rawModel.top_provider?.max_completion_tokens,
+		maxTokens: 0,
 		contextWindow: rawModel.context_length,
 		supportsImages: rawModel.architecture?.modality?.includes("image"),
 		supportsPromptCache,
```

src/api/providers/openrouter.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
```diff
@@ -106,7 +106,7 @@ export class OpenRouterHandler extends BaseProvider implements SingleCompletionH
 	// https://openrouter.ai/docs/transforms
 	const completionParams: OpenRouterChatCompletionParams = {
 		model: modelId,
-		max_tokens: maxTokens,
+		...(maxTokens && maxTokens > 0 && { max_tokens: maxTokens }),
 		temperature,
 		thinking, // OpenRouter is temporarily supporting this.
 		top_p: topP,
```

Comments (0)