Skip to content
This repository was archived by the owner on Jul 22, 2025. It is now read-only.

Commit 9551b1a

Browse files
authored
FIX: do not strip empty string during stream processing (#911)
Fixes an issue where the OpenAI provider was eating newlines and spaces
1 parent aef9a03 commit 9551b1a

File tree

2 files changed

+24
-1
lines changed

2 files changed

+24
-1
lines changed

lib/completions/open_ai_message_processor.rb

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -65,7 +65,8 @@ def process_streamed_message(json)
6565
@tool.parameters = parsed_args
6666
rval = @tool
6767
@tool = nil
68-
elsif content.present?
68+
elsif !content.to_s.empty?
69+
# we don't want to strip empty content like "\n", do not use present?
6970
rval = content
7071
end
7172

spec/lib/completions/endpoints/open_ai_spec.rb

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -519,6 +519,28 @@ def request_body(prompt, stream: false, tool_call: false)
519519
expect(response).to eq(tool_calls)
520520
end
521521

522+
it "properly handles newlines" do
523+
response = <<~TEXT.strip
524+
data: {"id":"chatcmpl-ASngi346UA9k006bF6GBRV66tEJfQ","object":"chat.completion.chunk","created":1731427548,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_159d8341cc","choices":[{"index":0,"delta":{"content":":\\n\\n"},"logprobs":null,"finish_reason":null}],"usage":null}
525+
526+
data: {"id":"chatcmpl-ASngi346UA9k006bF6GBRV66tEJfQ","object":"chat.completion.chunk","created":1731427548,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_159d8341cc","choices":[{"index":0,"delta":{"content":"```"},"logprobs":null,"finish_reason":null}],"usage":null}
527+
528+
data: {"id":"chatcmpl-ASngi346UA9k006bF6GBRV66tEJfQ","object":"chat.completion.chunk","created":1731427548,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_159d8341cc","choices":[{"index":0,"delta":{"content":"ruby"},"logprobs":null,"finish_reason":null}],"usage":null}
529+
530+
data: {"id":"chatcmpl-ASngi346UA9k006bF6GBRV66tEJfQ","object":"chat.completion.chunk","created":1731427548,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_159d8341cc","choices":[{"index":0,"delta":{"content":"\\n"},"logprobs":null,"finish_reason":null}],"usage":null}
531+
532+
data: {"id":"chatcmpl-ASngi346UA9k006bF6GBRV66tEJfQ","object":"chat.completion.chunk","created":1731427548,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_159d8341cc","choices":[{"index":0,"delta":{"content":"def"},"logprobs":null,"finish_reason":null}],"usage":null}
533+
TEXT
534+
535+
open_ai_mock.stub_raw(response)
536+
partials = []
537+
538+
dialect = compliance.dialect(prompt: compliance.generic_prompt)
539+
endpoint.perform_completion!(dialect, user) { |partial| partials << partial }
540+
541+
expect(partials).to eq([":\n\n", "```", "ruby", "\n", "def"])
542+
end
543+
522544
it "uses proper token accounting" do
523545
response = <<~TEXT.strip
524546
data: {"id":"chatcmpl-9OZidiHncpBhhNMcqCus9XiJ3TkqR","object":"chat.completion.chunk","created":1715644203,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_729ea513f7","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}],"usage":null}|

0 commit comments

Comments (0)