Skip to content

Commit 470b78a

Browse files
committed
fix: handle Mistral thinking content as reasoning chunks
- Add TypeScript interfaces for Mistral content types (text and thinking)
- Update createMessage to yield reasoning chunks for thinking content
- Update completePrompt to filter out thinking content in non-streaming mode
- Add comprehensive tests for reasoning content handling
- Follow the pattern used by other providers (Anthropic, OpenAI, Gemini, etc.)

Fixes #6842
1 parent 2a105a5 commit 470b78a

File tree

2 files changed

+169
-8
lines changed

2 files changed

+169
-8
lines changed

src/api/providers/__tests__/mistral.spec.ts

Lines changed: 137 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
// Mock Mistral client - must come before other imports
22
const mockCreate = vi.fn()
3+
const mockComplete = vi.fn()
34
vi.mock("@mistralai/mistralai", () => {
45
return {
56
Mistral: vi.fn().mockImplementation(() => ({
@@ -21,6 +22,17 @@ vi.mock("@mistralai/mistralai", () => {
2122
}
2223
return stream
2324
}),
25+
complete: mockComplete.mockImplementation(async (_options) => {
26+
return {
27+
choices: [
28+
{
29+
message: {
30+
content: "Test response",
31+
},
32+
},
33+
],
34+
}
35+
}),
2436
},
2537
})),
2638
}
@@ -29,7 +41,7 @@ vi.mock("@mistralai/mistralai", () => {
2941
import type { Anthropic } from "@anthropic-ai/sdk"
3042
import { MistralHandler } from "../mistral"
3143
import type { ApiHandlerOptions } from "../../../shared/api"
32-
import type { ApiStreamTextChunk } from "../../transform/stream"
44+
import type { ApiStreamTextChunk, ApiStreamReasoningChunk } from "../../transform/stream"
3345

3446
describe("MistralHandler", () => {
3547
let handler: MistralHandler
@@ -44,6 +56,7 @@ describe("MistralHandler", () => {
4456
}
4557
handler = new MistralHandler(mockOptions)
4658
mockCreate.mockClear()
59+
mockComplete.mockClear()
4760
})
4861

4962
describe("constructor", () => {
@@ -122,5 +135,128 @@ describe("MistralHandler", () => {
122135
mockCreate.mockRejectedValueOnce(new Error("API Error"))
123136
await expect(handler.createMessage(systemPrompt, messages).next()).rejects.toThrow("API Error")
124137
})
138+
139+
it("should handle thinking content as reasoning chunks", async () => {
140+
// Mock stream with thinking content
141+
mockCreate.mockImplementationOnce(async (_options) => {
142+
const stream = {
143+
[Symbol.asyncIterator]: async function* () {
144+
yield {
145+
data: {
146+
choices: [
147+
{
148+
delta: {
149+
content: [
150+
{ type: "thinking", text: "Let me think about this..." },
151+
{ type: "text", text: "Here's the answer" },
152+
],
153+
},
154+
index: 0,
155+
},
156+
],
157+
},
158+
}
159+
},
160+
}
161+
return stream
162+
})
163+
164+
const iterator = handler.createMessage(systemPrompt, messages)
165+
const results: (ApiStreamTextChunk | ApiStreamReasoningChunk)[] = []
166+
167+
for await (const chunk of iterator) {
168+
if ("text" in chunk) {
169+
results.push(chunk as ApiStreamTextChunk | ApiStreamReasoningChunk)
170+
}
171+
}
172+
173+
expect(results).toHaveLength(2)
174+
expect(results[0]).toEqual({ type: "reasoning", text: "Let me think about this..." })
175+
expect(results[1]).toEqual({ type: "text", text: "Here's the answer" })
176+
})
177+
178+
it("should handle mixed content arrays correctly", async () => {
179+
// Mock stream with mixed content
180+
mockCreate.mockImplementationOnce(async (_options) => {
181+
const stream = {
182+
[Symbol.asyncIterator]: async function* () {
183+
yield {
184+
data: {
185+
choices: [
186+
{
187+
delta: {
188+
content: [
189+
{ type: "text", text: "First text" },
190+
{ type: "thinking", text: "Some reasoning" },
191+
{ type: "text", text: "Second text" },
192+
],
193+
},
194+
index: 0,
195+
},
196+
],
197+
},
198+
}
199+
},
200+
}
201+
return stream
202+
})
203+
204+
const iterator = handler.createMessage(systemPrompt, messages)
205+
const results: (ApiStreamTextChunk | ApiStreamReasoningChunk)[] = []
206+
207+
for await (const chunk of iterator) {
208+
if ("text" in chunk) {
209+
results.push(chunk as ApiStreamTextChunk | ApiStreamReasoningChunk)
210+
}
211+
}
212+
213+
expect(results).toHaveLength(3)
214+
expect(results[0]).toEqual({ type: "text", text: "First text" })
215+
expect(results[1]).toEqual({ type: "reasoning", text: "Some reasoning" })
216+
expect(results[2]).toEqual({ type: "text", text: "Second text" })
217+
})
218+
})
219+
220+
describe("completePrompt", () => {
221+
it("should complete prompt successfully", async () => {
222+
const prompt = "Test prompt"
223+
const result = await handler.completePrompt(prompt)
224+
225+
expect(mockComplete).toHaveBeenCalledWith({
226+
model: mockOptions.apiModelId,
227+
messages: [{ role: "user", content: prompt }],
228+
temperature: 0,
229+
})
230+
231+
expect(result).toBe("Test response")
232+
})
233+
234+
it("should filter out thinking content in completePrompt", async () => {
235+
mockComplete.mockImplementationOnce(async (_options) => {
236+
return {
237+
choices: [
238+
{
239+
message: {
240+
content: [
241+
{ type: "thinking", text: "Let me think..." },
242+
{ type: "text", text: "Answer part 1" },
243+
{ type: "text", text: "Answer part 2" },
244+
],
245+
},
246+
},
247+
],
248+
}
249+
})
250+
251+
const prompt = "Test prompt"
252+
const result = await handler.completePrompt(prompt)
253+
254+
expect(result).toBe("Answer part 1Answer part 2")
255+
})
256+
257+
it("should handle errors in completePrompt", async () => {
258+
mockComplete.mockRejectedValueOnce(new Error("API Error"))
259+
await expect(handler.completePrompt("Test prompt")).rejects.toThrow("Mistral completion error: API Error")
260+
})
125261
})
126262
})

src/api/providers/mistral.ts

Lines changed: 32 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,19 @@ import { ApiStream } from "../transform/stream"
1111
import { BaseProvider } from "./base-provider"
1212
import type { SingleCompletionHandler, ApiHandlerCreateMessageMetadata } from "../index"
1313

14+
// Define TypeScript interfaces for Mistral content types
15+
interface MistralTextContent {
16+
type: "text"
17+
text: string
18+
}
19+
20+
interface MistralThinkingContent {
21+
type: "thinking"
22+
text: string
23+
}
24+
25+
type MistralContent = MistralTextContent | MistralThinkingContent | string
26+
1427
export class MistralHandler extends BaseProvider implements SingleCompletionHandler {
1528
protected options: ApiHandlerOptions
1629
private client: Mistral
@@ -52,15 +65,23 @@ export class MistralHandler extends BaseProvider implements SingleCompletionHand
5265
const delta = chunk.data.choices[0]?.delta
5366

5467
if (delta?.content) {
55-
let content: string = ""
56-
5768
if (typeof delta.content === "string") {
58-
content = delta.content
69+
// Handle string content as text
70+
yield { type: "text", text: delta.content }
5971
} else if (Array.isArray(delta.content)) {
60-
content = delta.content.map((c) => (c.type === "text" ? c.text : "")).join("")
72+
// Handle array of content blocks
73+
for (const c of delta.content as MistralContent[]) {
74+
if (typeof c === "object" && c !== null) {
75+
if (c.type === "thinking" && c.text) {
76+
// Handle thinking content as reasoning chunks
77+
yield { type: "reasoning", text: c.text }
78+
} else if (c.type === "text" && c.text) {
79+
// Handle text content normally
80+
yield { type: "text", text: c.text }
81+
}
82+
}
83+
}
6184
}
62-
63-
yield { type: "text", text: content }
6485
}
6586

6687
if (chunk.data.usage) {
@@ -97,7 +118,11 @@ export class MistralHandler extends BaseProvider implements SingleCompletionHand
97118
const content = response.choices?.[0]?.message.content
98119

99120
if (Array.isArray(content)) {
100-
return content.map((c) => (c.type === "text" ? c.text : "")).join("")
121+
// Only return text content, filter out thinking content for non-streaming
122+
return content
123+
.filter((c: MistralContent) => typeof c === "object" && c.type === "text")
124+
.map((c: MistralContent) => (c as MistralTextContent).text || "")
125+
.join("")
101126
}
102127

103128
return content || ""

0 commit comments

Comments (0)