Skip to content

Commit f263698

Browse files
committed
Fix: message.content as Array isn't compatible with some providers
1 parent 677e145 commit f263698

File tree

3 files changed

+128
-11
lines changed

3 files changed

+128
-11
lines changed

.changeset/good-owls-speak.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
"roo-cline": patch
3+
---
4+
5+
Fix: message.content as Array isn't compatible with some providers

src/api/transform/__tests__/openai-format.test.ts

Lines changed: 107 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,113 @@ describe("convertToOpenAiMessages", () => {
2828
role: "assistant",
2929
content: "Hi there!",
3030
})
31+
32+
it("should split nonToolMessages array content into multiple user messages", () => {
33+
const anthropicMessages: Anthropic.Messages.MessageParam[] = [
34+
{
35+
role: "user",
36+
content: [
37+
{ type: "text", text: "First message" },
38+
{ type: "text", text: "Second message" },
39+
],
40+
},
41+
]
42+
43+
const openAiMessages = convertToOpenAiMessages(anthropicMessages)
44+
expect(openAiMessages).toHaveLength(2)
45+
expect(openAiMessages[0]).toEqual({ role: "user", content: "First message" })
46+
expect(openAiMessages[1]).toEqual({ role: "user", content: "Second message" })
47+
})
48+
49+
it("should handle messages with image content", () => {
50+
const anthropicMessages: Anthropic.Messages.MessageParam[] = [
51+
{
52+
role: "user",
53+
content: [
54+
{
55+
type: "text",
56+
text: "What is in this image?",
57+
},
58+
{
59+
type: "image",
60+
source: {
61+
type: "base64",
62+
media_type: "image/jpeg",
63+
data: "base64data",
64+
},
65+
},
66+
],
67+
},
68+
]
69+
70+
const openAiMessages = convertToOpenAiMessages(anthropicMessages)
71+
expect(openAiMessages).toHaveLength(2)
72+
expect(openAiMessages[0].role).toBe("user")
73+
expect(openAiMessages[0]).toEqual({ role: "user", content: "What is in this image?" })
74+
expect(openAiMessages[1]).toEqual({
75+
role: "user",
76+
content: "IMAGE: data:image/jpeg;base64,base64data",
77+
})
78+
})
79+
80+
it("should handle assistant messages with tool use", () => {
81+
const anthropicMessages: Anthropic.Messages.MessageParam[] = [
82+
{
83+
role: "assistant",
84+
content: [
85+
{
86+
type: "text",
87+
text: "Let me check the weather.",
88+
},
89+
{
90+
type: "tool_use",
91+
id: "weather-123",
92+
name: "get_weather",
93+
input: { city: "London" },
94+
},
95+
],
96+
},
97+
]
98+
99+
const openAiMessages = convertToOpenAiMessages(anthropicMessages)
100+
expect(openAiMessages).toHaveLength(1)
101+
102+
const assistantMessage = openAiMessages[0] as OpenAI.Chat.ChatCompletionAssistantMessageParam
103+
expect(assistantMessage.role).toBe("assistant")
104+
expect(assistantMessage.content).toBe("Let me check the weather.")
105+
expect(assistantMessage.tool_calls).toHaveLength(1)
106+
expect(assistantMessage.tool_calls![0]).toEqual({
107+
id: "weather-123",
108+
type: "function",
109+
function: {
110+
name: "get_weather",
111+
arguments: JSON.stringify({ city: "London" }),
112+
},
113+
})
114+
})
115+
116+
it("should handle user messages with tool results", () => {
117+
const anthropicMessages: Anthropic.Messages.MessageParam[] = [
118+
{
119+
role: "user",
120+
content: [
121+
{
122+
type: "tool_result",
123+
tool_use_id: "weather-123",
124+
content: "Current temperature in London: 20°C",
125+
},
126+
],
127+
},
128+
]
129+
130+
const openAiMessages = convertToOpenAiMessages(anthropicMessages)
131+
expect(openAiMessages).toHaveLength(1)
132+
133+
const toolMessage = openAiMessages[0] as OpenAI.Chat.ChatCompletionToolMessageParam
134+
expect(toolMessage.role).toBe("tool")
135+
expect(toolMessage.tool_call_id).toBe("weather-123")
136+
expect(toolMessage.content).toBe("Current temperature in London: 20°C")
137+
})
31138
})
32139

33140
it("should handle messages with image content", () => {

src/api/transform/openai-format.ts

Lines changed: 16 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -79,17 +79,22 @@ export function convertToOpenAiMessages(
7979

8080
// Process non-tool messages
8181
if (nonToolMessages.length > 0) {
82-
openAiMessages.push({
83-
role: "user",
84-
content: nonToolMessages.map((part) => {
85-
if (part.type === "image") {
86-
return {
87-
type: "image_url",
88-
image_url: { url: `data:${part.source.media_type};base64,${part.source.data}` },
89-
}
90-
}
91-
return { type: "text", text: part.text }
92-
}),
82+
// Fix content being an array by splitting it into multiple string messages.
83+
// Some providers do not recognize content of the form `content: [{xxx}, {xxx}]`, for example:
84+
// - ByteDance Volcano Engine https://www.volcengine.com/
85+
// - Baidu Intelligent Cloud https://cloud.baidu.com
86+
nonToolMessages.forEach((part) => {
87+
if (part.type === "image") {
88+
openAiMessages.push({
89+
role: "user",
90+
content: `IMAGE: data:${part.source.media_type};base64,${part.source.data}`,
91+
})
92+
} else {
93+
openAiMessages.push({
94+
role: "user",
95+
content: part.text,
96+
})
97+
}
9398
})
9499
}
95100
} else if (anthropicMessage.role === "assistant") {

0 commit comments

Comments
 (0)