Skip to content

Commit 674a691

Browse files
committed
Fix OpenAI Chat stream: prevent tool_calls[].type becoming "functionfunction..."
When merging streaming deltas, delta was converted with as_json.deep_symbolize_keys, which left tool_calls[].type as the string "function". hash_merge_delta concatenates when the existing value is a String (hash[key] += value), so each chunk appended "function" and produced "functionfunctionfunction...". - Use delta.deep_to_h so the gem returns symbol-keyed hashes with :type => :function. Merges then hit the replace branch and type stays :function. - Match when :function in process_function_calls for consistency.
1 parent b641710 commit 674a691

File tree

2 files changed

+128
-2
lines changed

2 files changed

+128
-2
lines changed

lib/active_agent/providers/open_ai/chat_provider.rb

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -102,7 +102,7 @@ def process_stream_chunk(api_response_event)
102102

103103
# If we have a delta, we need to update a message in the stack
104104
message = find_or_create_message(api_message.index)
105-
message = message_merge_delta(message, api_message.delta.as_json.deep_symbolize_keys)
105+
message_merge_delta(message, api_message.delta.deep_to_h)
106106

107107
# Stream back content changes as they come in
108108
if api_message.delta.content
@@ -138,7 +138,7 @@ def process_stream_chunk(api_response_event)
138138
def process_function_calls(api_function_calls)
139139
api_function_calls.each do |api_function_call|
140140
content = instrument("tool_call.active_agent", tool_name: api_function_call.dig(:function, :name)) do
141-
case api_function_call[:type]
141+
case api_function_call[:type].to_s
142142
when "function"
143143
process_tool_call_function(api_function_call[:function])
144144
else
Lines changed: 126 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,126 @@
1+
# frozen_string_literal: true

require "test_helper"

# The OpenAI gem is an optional dependency; bail out of this file entirely
# (top-level `return` skips the rest of the file) rather than failing to load.
begin
  require "openai"
rescue LoadError
  puts "OpenAI gem not available, skipping OpenAI Chat provider tests"
  return
end

require_relative "../../../lib/active_agent/providers/open_ai/chat_provider"

module Providers
  module OpenAI
    module Chat
      # Regression test for streaming tool-call delta merging: each SSE chunk's
      # delta must be merged with symbol values (delta.deep_to_h) so that
      # tool_calls[].type stays :function instead of string-concatenating into
      # "functionfunction..." across chunks.
      class ChatProviderTest < ActiveSupport::TestCase
        include WebMock::API

        setup do
          WebMock.enable!
          # Point the client at a local URL so WebMock can intercept the request.
          @client = ::OpenAI::Client.new(base_url: "http://localhost", api_key: "test-key")
        end

        teardown do
          WebMock.disable!
        end

        test "accumulates streaming tool call deltas into message_stack" do
          stub_streaming_response(tool_calls_sse_response)

          stream = @client.chat.completions.stream(
            messages: [{ content: "What's the weather in Boston?", role: :user }],
            model: "qwen-plus",
            tools: weather_tool
          )

          chat_provider = ActiveAgent::Providers::OpenAI::ChatProvider.new

          # Feed every streamed event through the private chunk processor,
          # exactly as the provider would during a live streaming response.
          stream.each do |event|
            chat_provider.send(:process_stream_chunk, event)
          end

          # The four argument fragments from the SSE chunks must concatenate
          # into one JSON string, and :type must remain the symbol :function.
          expected_message = {
            index: 0,
            role: :assistant,
            tool_calls: [
              {
                index: 0,
                id: "call_123",
                function: {
                  name: "get_weather",
                  arguments: '{"city":"Paris","units":"celsius"}'
                },
                type: :function
              }
            ]
          }

          assert_equal(
            [expected_message],
            chat_provider.send(:message_stack),
            "message_stack should contain one assistant message with merged tool_calls"
          )
        end

        private

        # Stubs POST /chat/completions to return +response_body+ as an SSE
        # stream. +request_options+ lets callers override/extend the expected
        # request body used for matching.
        def stub_streaming_response(response_body, request_options = {})
          default_request = {
            messages: [{ content: "What's the weather in Boston?", role: "user" }],
            model: "qwen-plus",
            stream: true
          }

          stub_request(:post, "http://localhost/chat/completions")
            .with(body: hash_including(default_request.merge(request_options)))
            .to_return(
              status: 200,
              headers: { "Content-Type" => "text/event-stream" },
              body: response_body
            )
        end

        # A canned SSE stream: one chunk carrying the tool call's id/name/type,
        # three chunks each carrying a fragment of the JSON arguments, then the
        # finish chunk and the [DONE] sentinel.
        def tool_calls_sse_response
          <<~SSE
            data: {"id":"chatcmpl-1","object":"chat.completion.chunk","created":1234567890,"model":"qwen-plus","choices":[{"index":0,"content":null,"delta":{"role":"assistant","tool_calls":[{"index":0,"id":"call_123","type":"function","function":{"name":"get_weather","arguments":""}}]},"finish_reason":null}]}

            data: {"id":"chatcmpl-1","object":"chat.completion.chunk","created":1234567890,"model":"qwen-plus","choices":[{"index":0,"content":null,"delta":{"tool_calls":[{"index":0,"type":"function","function":{"arguments":"{\\"city\\":"}}]},"finish_reason":null}]}

            data: {"id":"chatcmpl-1","object":"chat.completion.chunk","created":1234567890,"model":"qwen-plus","choices":[{"index":0,"content":null,"delta":{"tool_calls":[{"index":0,"type":"function","function":{"arguments":"\\"Paris\\","}}]},"finish_reason":null}]}

            data: {"id":"chatcmpl-1","object":"chat.completion.chunk","created":1234567890,"model":"qwen-plus","choices":[{"index":0,"content":null,"delta":{"tool_calls":[{"index":0,"type":"function","function":{"arguments":"\\"units\\":\\"celsius\\"}"}}]},"finish_reason":null}]}

            data: {"id":"chatcmpl-1","object":"chat.completion.chunk","created":1234567890,"model":"qwen-plus","choices":[{"index":0,"delta":{},"finish_reason":"tool_calls"}]}

            data: [DONE]

          SSE
        end

        # Tool definition sent with the request so the model may emit a
        # get_weather tool call.
        def weather_tool
          [
            {
              type: :function,
              function: {
                name: "get_weather",
                parameters: {
                  type: "object",
                  properties: {
                    city: { type: "string" },
                    units: { type: "string" }
                  },
                  required: ["city", "units"],
                  additionalProperties: false
                },
                strict: true
              }
            }
          ]
        end
      end
    end
  end
end

0 commit comments

Comments
 (0)