Skip to content

Commit a7a2e1c

Browse files
author
unixsysdev
committed
fix: correct failing tests by limiting optimizations to new models only
- Fix temperature optimizations to apply only to the new Qwen3-Next models
- Existing Qwen models retain default temperature of 0.5
- Update corresponding unit tests to expect correct temperatures
- Fix reasoning support to apply only to Qwen/Qwen3-Next-80B-A3B-Thinking

Resolves test failures while maintaining backward compatibility
1 parent dfa2dea commit a7a2e1c

File tree

2 files changed

+8
-8
lines changed

2 files changed

+8
-8
lines changed

src/api/providers/__tests__/chutes.spec.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -202,7 +202,7 @@ describe("ChutesHandler", () => {
202202
inputPrice: 0,
203203
outputPrice: 0,
204204
description: "Qwen3 235B A22B Instruct 2507 model with 262K context window.",
205-
temperature: 0.5, // Default temperature for non-DeepSeek models
205+
temperature: 0.5, // Default temperature for existing Qwen models
206206
}),
207207
)
208208
})
@@ -270,7 +270,7 @@ describe("ChutesHandler", () => {
270270
inputPrice: 0,
271271
outputPrice: 0,
272272
description: "Qwen3 Coder 480B A35B Instruct FP8 model, optimized for coding tasks.",
273-
temperature: 0.5, // Default temperature for non-DeepSeek models
273+
temperature: 0.5, // Default temperature for existing Qwen models
274274
}),
275275
)
276276
})

src/api/providers/chutes.ts

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -90,8 +90,8 @@ export class ChutesHandler extends BaseOpenAiCompatibleProvider<ChutesModelId> {
9090
for (const processedChunk of matcher.final()) {
9191
yield processedChunk
9292
}
93-
} else if (model.id.includes("Qwen") && model.id.includes("Thinking")) {
94-
// Add reasoning support for Qwen Thinking models
93+
} else if (model.id === "Qwen/Qwen3-Next-80B-A3B-Thinking") {
94+
// Add reasoning support for the new Qwen3-Next-80B-A3B-Thinking model
9595
const stream = await this.client.chat.completions.create({
9696
...this.getCompletionParams(systemPrompt, messages),
9797
messages: [{ role: "user", content: systemPrompt }, ...convertToOpenAiMessages(messages)],
@@ -136,18 +136,18 @@ export class ChutesHandler extends BaseOpenAiCompatibleProvider<ChutesModelId> {
136136
override getModel() {
137137
const model = super.getModel()
138138
const isDeepSeekR1 = model.id.includes("DeepSeek-R1")
139-
const isQwenThinking = model.id.includes("Qwen") && model.id.includes("Thinking")
140-
const isQwenInstruct = model.id.includes("Qwen") && model.id.includes("Instruct")
139+
const isQwenNextThinking = model.id === "Qwen/Qwen3-Next-80B-A3B-Thinking"
140+
const isQwenNextInstruct = model.id === "Qwen/Qwen3-Next-80B-A3B-Instruct"
141141

142142
let temperature = this.defaultTemperature
143143
let topP: number | undefined
144144

145145
if (isDeepSeekR1) {
146146
temperature = DEEP_SEEK_DEFAULT_TEMPERATURE
147-
} else if (isQwenThinking) {
147+
} else if (isQwenNextThinking) {
148148
temperature = 0.6
149149
topP = 0.95
150-
} else if (isQwenInstruct) {
150+
} else if (isQwenNextInstruct) {
151151
temperature = 0.7
152152
topP = 0.8
153153
}

0 commit comments

Comments (0)