
Commit 5ab0854

fix(claude): track message_start event in streaming response
Add a `MessageStarted` flag to `ConvertOpenAIResponseToAnthropicParams` so the `message_start` event is emitted only once during streaming. Refactor response handling to detect streaming mode via the request's `stream` field instead of the response's `object` type, simplifying the branching logic. Update the streaming conversion to set `MessageStarted` after sending the `message_start` event, preventing duplicate starts. These changes improve the correctness of streaming response handling for the Claude integration.
1 parent 15981aa commit 5ab0854
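
As a rough illustration of the detection logic described in the commit message (not code from this repository; `isStreaming` is a hypothetical helper), the original request's `stream` field can be inspected with gjson, treating a missing or explicitly false value as non-streaming:

package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

// isStreaming reports whether the original request asked for a streaming
// response; a missing or false "stream" field means non-streaming.
func isStreaming(originalRequestRawJSON []byte) bool {
	streamResult := gjson.GetBytes(originalRequestRawJSON, "stream")
	return streamResult.Exists() && streamResult.Type != gjson.False
}

func main() {
	fmt.Println(isStreaming([]byte(`{"model":"some-model"}`)))                // false
	fmt.Println(isStreaming([]byte(`{"model":"some-model","stream":true}`)))  // true
	fmt.Println(isStreaming([]byte(`{"model":"some-model","stream":false}`))) // false
}

Keying the branch off the request rather than the response's `object` type means the converter no longer depends on the upstream provider labeling chunks as `chat.completion.chunk`.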

1 file changed: +8 −13 lines changed

internal/translator/openai/claude/openai_claude_response.go

Lines changed: 8 additions & 13 deletions
@@ -37,6 +37,8 @@ type ConvertOpenAIResponseToAnthropicParams struct {
 	ContentBlocksStopped bool
 	// Track if message_delta has been sent
 	MessageDeltaSent bool
+	// Track if message_start has been sent
+	MessageStarted bool
 }
 
 // ToolCallAccumulator holds the state for accumulating tool call data
@@ -84,20 +86,12 @@ func ConvertOpenAIResponseToClaude(_ context.Context, _ string, originalRequestR
 		return convertOpenAIDoneToAnthropic((*param).(*ConvertOpenAIResponseToAnthropicParams))
 	}
 
-	root := gjson.ParseBytes(rawJSON)
-
-	// Check if this is a streaming chunk or non-streaming response
-	objectType := root.Get("object").String()
-
-	if objectType == "chat.completion.chunk" {
-		// Handle streaming response
-		return convertOpenAIStreamingChunkToAnthropic(rawJSON, (*param).(*ConvertOpenAIResponseToAnthropicParams))
-	} else if objectType == "chat.completion" {
-		// Handle non-streaming response
+	streamResult := gjson.GetBytes(originalRequestRawJSON, "stream")
+	if !streamResult.Exists() || (streamResult.Exists() && streamResult.Type == gjson.False) {
 		return convertOpenAINonStreamingToAnthropic(rawJSON)
+	} else {
+		return convertOpenAIStreamingChunkToAnthropic(rawJSON, (*param).(*ConvertOpenAIResponseToAnthropicParams))
 	}
-
-	return []string{}
 }
 
 // convertOpenAIStreamingChunkToAnthropic converts OpenAI streaming chunk to Anthropic streaming events
@@ -118,7 +112,7 @@ func convertOpenAIStreamingChunkToAnthropic(rawJSON []byte, param *ConvertOpenAI
 
 	// Check if this is the first chunk (has role)
 	if delta := root.Get("choices.0.delta"); delta.Exists() {
-		if role := delta.Get("role"); role.Exists() && role.String() == "assistant" {
+		if role := delta.Get("role"); role.Exists() && role.String() == "assistant" && !param.MessageStarted {
 			// Send message_start event
 			messageStart := map[string]interface{}{
 				"type": "message_start",
@@ -138,6 +132,7 @@ func convertOpenAIStreamingChunkToAnthropic(rawJSON []byte, param *ConvertOpenAI
 			}
 			messageStartJSON, _ := json.Marshal(messageStart)
 			results = append(results, "event: message_start\ndata: "+string(messageStartJSON)+"\n\n")
+			param.MessageStarted = true
 
 			// Don't send content_block_start for text here - wait for actual content
 		}
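
For intuition, here is a minimal, hypothetical sketch of the guard pattern the diff introduces (`convertParams` and `emitMessageStart` are illustrative names, not the repository's API): the flag flips to true after the first `message_start` is written, so later chunks that repeat the assistant role no longer produce a duplicate event.

package main

import (
	"encoding/json"
	"fmt"
)

// convertParams mimics the MessageStarted bookkeeping added by this commit.
type convertParams struct {
	MessageStarted bool // set once message_start has been emitted
}

// emitMessageStart returns a message_start SSE event on the first call and
// nothing on subsequent calls, because the flag suppresses duplicates.
func emitMessageStart(p *convertParams, model string) []string {
	if p.MessageStarted {
		return nil
	}
	event := map[string]interface{}{
		"type": "message_start",
		"message": map[string]interface{}{
			"role":  "assistant",
			"model": model,
		},
	}
	data, _ := json.Marshal(event)
	p.MessageStarted = true
	return []string{"event: message_start\ndata: " + string(data) + "\n\n"}
}

func main() {
	p := &convertParams{}
	fmt.Println(len(emitMessageStart(p, "some-model"))) // 1: first chunk emits the event
	fmt.Println(len(emitMessageStart(p, "some-model"))) // 0: later chunks are suppressed
}

Called twice, the first call yields one SSE event and the second yields none, which mirrors the behavior the new `MessageStarted` field enforces in `convertOpenAIStreamingChunkToAnthropic`.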
