diff --git a/.release-please-manifest.json b/.release-please-manifest.json index caf5ca3f..59acac47 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.26.0" + ".": "0.27.0" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 94c22ce3..e467bd35 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,29 @@ # Changelog +## 0.27.0 (2025-09-26) + +Full Changelog: [v0.26.0...v0.27.0](https://github.com/openai/openai-ruby/compare/v0.26.0...v0.27.0) + +### Features + +* chat completion streaming helpers ([#828](https://github.com/openai/openai-ruby/issues/828)) ([6e98424](https://github.com/openai/openai-ruby/commit/6e9842485e819876dd6b78107fa45f1a5da67e4f)) + + +### Bug Fixes + +* **internal:** use null byte as file separator in the fast formatting script ([151ffe1](https://github.com/openai/openai-ruby/commit/151ffe10c9dc8d5edaf46de2a1c6b6e6fda80034)) +* shorten multipart boundary sep to less than RFC specificed max length ([d7770d1](https://github.com/openai/openai-ruby/commit/d7770d10ee3b093d8e2464b79e0e12be3a9d2beb)) + + +### Performance Improvements + +* faster code formatting ([67da711](https://github.com/openai/openai-ruby/commit/67da71139e5b572c97539299c39bae04c1d569fd)) + + +### Chores + +* allow fast-format to use bsd sed as well ([66ac913](https://github.com/openai/openai-ruby/commit/66ac913d195d8b5a5c4474ded88a5f9dad13b7b6)) + ## 0.26.0 (2025-09-23) Full Changelog: [v0.25.1...v0.26.0](https://github.com/openai/openai-ruby/compare/v0.25.1...v0.26.0) diff --git a/Gemfile.lock b/Gemfile.lock index 96026503..00a44544 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . 
specs: - openai (0.26.0) + openai (0.27.0) connection_pool GEM diff --git a/README.md b/README.md index 97b13dbe..7a05c316 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.26.0" +gem "openai", "~> 0.27.0" ``` diff --git a/Rakefile b/Rakefile index bc850886..107bb325 100644 --- a/Rakefile +++ b/Rakefile @@ -12,6 +12,8 @@ tapioca = "sorbet/tapioca" examples = "examples" ignore_file = ".ignore" +FILES_ENV = "FORMAT_FILE" + CLEAN.push(*%w[.idea/ .ruby-lsp/ .yardoc/ doc/], *FileList["*.gem"], ignore_file) CLOBBER.push(*%w[sorbet/rbi/annotations/ sorbet/rbi/gems/], tapioca) @@ -38,6 +40,14 @@ end xargs = %w[xargs --no-run-if-empty --null --max-procs=0 --max-args=300 --] ruby_opt = {"RUBYOPT" => [ENV["RUBYOPT"], "--encoding=UTF-8"].compact.join(" ")} +filtered = ->(ext, dirs) do + if ENV.key?(FILES_ENV) + %w[sed -E -n -e] << "/\\.#{ext}$/p" << "--" << ENV.fetch(FILES_ENV) + else + (%w[find] + dirs + %w[-type f -and -name]) << "*.#{ext}" << "-print0" + end +end + desc("Lint `*.rb(i)`") multitask(:"lint:rubocop") do find = %w[find ./lib ./test ./rbi ./examples -type f -and ( -name *.rb -or -name *.rbi ) -print0] @@ -52,24 +62,26 @@ multitask(:"lint:rubocop") do sh("#{find.shelljoin} | #{lint.shelljoin}") end +norm_lines = %w[tr -- \n \0].shelljoin + desc("Format `*.rb`") multitask(:"format:rb") do # while `syntax_tree` is much faster than `rubocop`, `rubocop` is the only formatter with full syntax support - find = %w[find ./lib ./test ./examples -type f -and -name *.rb -print0] + files = filtered["rb", %w[./lib ./test ./examples]] fmt = xargs + %w[rubocop --fail-level F --autocorrect --format simple --] - sh("#{find.shelljoin} | #{fmt.shelljoin}") + sh("#{files.shelljoin} | #{norm_lines} | #{fmt.shelljoin}") end desc("Format `*.rbi`") multitask(:"format:rbi") do - find = %w[find ./rbi -type f -and -name *.rbi -print0] + files = filtered["rbi", %w[./rbi]] fmt 
= xargs + %w[stree write --] - sh(ruby_opt, "#{find.shelljoin} | #{fmt.shelljoin}") + sh(ruby_opt, "#{files.shelljoin} | #{norm_lines} | #{fmt.shelljoin}") end desc("Format `*.rbs`") multitask(:"format:rbs") do - find = %w[find ./sig -type f -name *.rbs -print0] + files = filtered["rbs", %w[./sig]] inplace = /darwin|bsd/ =~ RUBY_PLATFORM ? ["-i", ""] : %w[-i] uuid = SecureRandom.uuid @@ -98,13 +110,13 @@ multitask(:"format:rbs") do success = false # transform class aliases to type aliases, which syntax tree has no trouble with - sh("#{find.shelljoin} | #{pre.shelljoin}") + sh("#{files.shelljoin} | #{norm_lines} | #{pre.shelljoin}") # run syntax tree to format `*.rbs` files - sh(ruby_opt, "#{find.shelljoin} | #{fmt.shelljoin}") do + sh(ruby_opt, "#{files.shelljoin} | #{norm_lines} | #{fmt.shelljoin}") do success = _1 end # transform type aliases back to class aliases - sh("#{find.shelljoin} | #{pst.shelljoin}") + sh("#{files.shelljoin} | #{norm_lines} | #{pst.shelljoin}") # always run post-processing to remove comment marker fail unless success diff --git a/examples/chat/streaming_basic.rb b/examples/chat/streaming_basic.rb new file mode 100755 index 00000000..e8f949d1 --- /dev/null +++ b/examples/chat/streaming_basic.rb @@ -0,0 +1,23 @@ +#!/usr/bin/env ruby +# frozen_string_literal: true + +require_relative "../../lib/openai" + +# gets API Key from environment variable `OPENAI_API_KEY` +client = OpenAI::Client.new + +stream = client.chat.completions.stream( + model: "gpt-4o-mini", + messages: [ + {role: :user, content: "Write a creative haiku about the ocean."} + ] +) + +stream.each do |event| + case event + when OpenAI::Streaming::ChatContentDeltaEvent + print(event.delta) + when OpenAI::Streaming::ChatContentDoneEvent + puts + end +end diff --git a/examples/chat/streaming_follow_up.rb b/examples/chat/streaming_follow_up.rb new file mode 100755 index 00000000..1e5c8d04 --- /dev/null +++ b/examples/chat/streaming_follow_up.rb @@ -0,0 +1,58 @@ +#!/usr/bin/env ruby 
+# frozen_string_literal: true + +require_relative "../../lib/openai" + +# gets API Key from environment variable `OPENAI_API_KEY` +client = OpenAI::Client.new + +# This example demonstrates how to start a new streamed chat completion that includes prior turns by +# resending the conversation messages. +# +# 1. Start with an initial user turn and stream the assistant reply. +messages = [ + {role: :user, content: "Tell me a short story about a robot. Stop after 2 sentences."} +] + +puts "First streamed completion:" +assistant_text = "" + +stream1 = client.chat.completions.stream( + model: "gpt-4o-mini", + messages: messages +) + +stream1.each do |event| + case event + when OpenAI::Streaming::ChatContentDeltaEvent + assistant_text += event.delta + print(event.delta) + when OpenAI::Streaming::ChatContentDoneEvent + puts + end +end + +# 2. Start a new streamed completion that includes the prior assistant turn +# and adds a follow-up user instruction. +messages << {role: :assistant, content: assistant_text} +messages << {role: :user, content: "Continue the story with 2 more sentences while keeping the same style."} + +puts +puts "Second streamed completion (with prior turns included):" + +stream2 = client.chat.completions.stream( + model: "gpt-4o-mini", + messages: messages +) + +stream2.each do |event| + case event + when OpenAI::Streaming::ChatContentDeltaEvent + print(event.delta) + when OpenAI::Streaming::ChatContentDoneEvent + puts + end +end + +puts +puts "Done. The second stream is a new completion that used the prior turns as context." 
diff --git a/examples/chat/streaming_logprobs.rb b/examples/chat/streaming_logprobs.rb new file mode 100755 index 00000000..84130fa8 --- /dev/null +++ b/examples/chat/streaming_logprobs.rb @@ -0,0 +1,32 @@ +#!/usr/bin/env ruby +# frozen_string_literal: true + +require_relative "../../lib/openai" + +# gets API Key from environment variable `OPENAI_API_KEY` +client = OpenAI::Client.new + +stream = client.chat.completions.stream( + model: "gpt-4o-mini", + logprobs: true, + top_logprobs: 3, + messages: [ + {role: :user, content: "Finish the sentence: The capital of France is"} + ] +) + +stream.each do |event| + case event + when OpenAI::Streaming::ChatContentDeltaEvent + print(event.delta) + when OpenAI::Streaming::ChatLogprobsContentDeltaEvent + # Print top logprobs for the last token in the delta + tokens = event.content + last = tokens.last + next unless last + alts = last.top_logprobs.map { |t| "#{t.token}=#{format('%.2f', t.logprob)}" }.join(", ") + puts("\nlogprobs: [#{alts}]") + when OpenAI::Streaming::ChatLogprobsContentDoneEvent + puts("\n--- logprobs collection finished (#{event.content.length} tokens) ---") + end +end diff --git a/examples/chat/streaming_multi_choice.rb b/examples/chat/streaming_multi_choice.rb new file mode 100755 index 00000000..6f7e4e92 --- /dev/null +++ b/examples/chat/streaming_multi_choice.rb @@ -0,0 +1,43 @@ +#!/usr/bin/env ruby +# frozen_string_literal: true + +require_relative "../../lib/openai" + +# gets API Key from environment variable `OPENAI_API_KEY` +client = OpenAI::Client.new + +stream = client.chat.completions.stream( + model: "gpt-4o-mini", + n: 2, + messages: [ + {role: :user, content: "Give me two short taglines for a beach resort."} + ] +) + +choice_contents = {} +choice_finished = {} + +stream.each do |event| + case event + when OpenAI::Streaming::ChatChunkEvent + # Access the full snapshot with all choices: + event.snapshot.choices.each_with_index do |choice, index| + if choice.message.content + choice_contents[index] 
= choice.message.content + end + + next unless choice.finish_reason && !choice_finished[index] + choice_finished[index] = true + # Print the complete content for this choice when it finishes: + puts("[choice #{index}] complete:") + puts(choice_contents[index]) + puts("--- choice #{index} done ---") + puts + end + end +end + +puts("------ final choices ------") +choice_contents.keys.sort.each do |i| + puts("[#{i}] #{choice_contents[i]}") +end diff --git a/examples/chat/streaming_structured_outputs.rb b/examples/chat/streaming_structured_outputs.rb new file mode 100755 index 00000000..930a00b2 --- /dev/null +++ b/examples/chat/streaming_structured_outputs.rb @@ -0,0 +1,46 @@ +#!/usr/bin/env ruby +# frozen_string_literal: true + +require_relative "../../lib/openai" + +class Step < OpenAI::BaseModel + required :explanation, String + required :output, String +end + +class MathResponse < OpenAI::BaseModel + required :steps, OpenAI::ArrayOf[Step] + required :final_answer, String +end + +client = OpenAI::Client.new + +stream = client.chat.completions.stream( + model: "gpt-4o-mini", + response_format: MathResponse, + messages: [ + {role: :user, content: "solve 8x + 31 = 2, show all steps"} + ] +) + +stream.each do |event| + case event + when OpenAI::Streaming::ChatContentDeltaEvent + print(event.delta) + when OpenAI::Streaming::ChatContentDoneEvent + puts + puts("--- parsed object ---") + pp(event.parsed) + end +end + +response = stream.get_final_completion + +puts +puts("----- parsed outputs from final response -----") +response + .choices + .each do |choice| + # parsed is an instance of `MathResponse` + pp(choice.message.parsed) + end diff --git a/examples/chat/streaming_text.rb b/examples/chat/streaming_text.rb new file mode 100755 index 00000000..8627884d --- /dev/null +++ b/examples/chat/streaming_text.rb @@ -0,0 +1,19 @@ +#!/usr/bin/env ruby +# frozen_string_literal: true + +require_relative "../../lib/openai" + +# gets API Key from environment variable 
`OPENAI_API_KEY` +client = OpenAI::Client.new + +stream = client.chat.completions.stream( + model: "gpt-4o-mini", + messages: [ + {role: :user, content: "List three fun facts about dolphins."} + ] +) + +stream.text.each do |text| + print(text) +end +puts diff --git a/examples/chat/streaming_tools.rb b/examples/chat/streaming_tools.rb new file mode 100755 index 00000000..922163db --- /dev/null +++ b/examples/chat/streaming_tools.rb @@ -0,0 +1,32 @@ +#!/usr/bin/env ruby +# frozen_string_literal: true + +require_relative "../../lib/openai" + +class GetWeather < OpenAI::BaseModel + required :location, String +end + +# gets API Key from environment variable `OPENAI_API_KEY` +client = OpenAI::Client.new + +stream = client.chat.completions.stream( + model: "gpt-4o-mini", + tools: [GetWeather], + messages: [ + {role: :user, content: "Call get_weather with location San Francisco in JSON."} + ] +) + +stream.each do |event| + case event + when OpenAI::Streaming::ChatFunctionToolCallArgumentsDeltaEvent + puts("delta: #{event.arguments_delta}") + pp(event.parsed) + when OpenAI::Streaming::ChatFunctionToolCallArgumentsDoneEvent + puts("--- Tool call finalized ---") + puts("name: #{event.name}") + puts("args: #{event.arguments}") + pp(event.parsed) + end +end diff --git a/helpers.md b/helpers.md index 3ba47c6a..01b5c58c 100644 --- a/helpers.md +++ b/helpers.md @@ -155,3 +155,244 @@ When `text` is provided: - The model is instructed to output valid JSON matching your schema - Text content is automatically parsed into instances of your type - Parsed objects are available via `event.parsed` on text done events + +## Chat Completions API + +```ruby +stream = client.chat.completions.stream( + model: "gpt-4", + messages: [{role: "user", content: "Tell me a story about programming"}] +) + +stream.each do |event| + case event + when OpenAI::Streaming::ChatContentDeltaEvent + print(event.delta) + end +end + +puts +``` + +`client.chat.completions.stream` returns a `ChatCompletionStream` 
that is an `Enumerable` emitting events. + +The stream will be cancelled when the block exits but you can also close it prematurely by calling `stream.close`. + +See an example of streaming helpers in action in [`examples/chat/streaming.rb`](examples/chat/streaming.rb). + +### Events + +The events listed here are just the event types that the SDK extends, for a full list of the events returned by the API, see [these docs](https://platform.openai.com/docs/api-reference/chat/streaming). + +```ruby +require "openai" + +client = OpenAI::Client.new + +stream = client.chat.completions.stream( + model: "gpt-4", + messages: [{role: "user", content: "Write a haiku"}] +) + +stream.each do |event| + case event + when OpenAI::Streaming::ChatContentDeltaEvent + print(event.delta) + when OpenAI::Streaming::ChatContentDoneEvent + puts("\n\nContent completed: #{event.content}") + when OpenAI::Streaming::ChatFunctionToolCallArgumentsDoneEvent + puts("\nTool call completed: #{event.name} with args: #{event.parsed}") + end +end + +# you can still get the accumulated final completion outside of +# the block, as long as the entire stream was consumed +# inside of the block +final_completion = stream.get_final_completion +puts("Final completion: #{final_completion.to_json}") +``` + +#### `ChatChunkEvent` + +This event wraps each raw chunk from the API along with the accumulated state up to that point. All other events are derived from processing these chunks. + +```ruby +when OpenAI::Streaming::ChatChunkEvent + event.type # :chunk + event.chunk # ChatCompletionChunk (raw API response) + event.snapshot # ParsedChatCompletion (accumulated state) +``` + +#### `ChatContentDeltaEvent` + +This event is yielded whenever a text content delta is returned by the API & includes the delta and the accumulated snapshot, e.g. 
+ +```ruby +when OpenAI::Streaming::ChatContentDeltaEvent + event.type # :"content.delta" + event.delta # " world" + event.snapshot # "Hello world" + event.parsed # Your partially parsed model instance (when using structured outputs) +``` + +#### `ChatContentDoneEvent` + +This event is fired when text generation is complete & includes the full text and parsed content if using structured outputs. + +```ruby +when OpenAI::Streaming::ChatContentDoneEvent + event.type # :"content.done" + event.content # "Hello world" + event.parsed # Your parsed model instance (when using structured outputs) +``` + +#### `ChatRefusalDeltaEvent` + +This event is yielded whenever the assistant is refusing to fulfill a request & includes the delta and accumulated refusal message. + +```ruby +when OpenAI::Streaming::ChatRefusalDeltaEvent + event.type # :"refusal.delta" + event.delta # "I cannot" + event.snapshot # "I cannot help with that request" +``` + +#### `ChatRefusalDoneEvent` + +This event is fired when the assistant has finished generating a refusal message. + +```ruby +when OpenAI::Streaming::ChatRefusalDoneEvent + event.type # :"refusal.done" + event.refusal # "I cannot help with that request as it violates..." +``` + +#### `ChatFunctionToolCallArgumentsDeltaEvent` + +This event is yielded whenever function call arguments are being streamed & includes the delta and accumulated snapshot, e.g. + +```ruby +when OpenAI::Streaming::ChatFunctionToolCallArgumentsDeltaEvent + event.type # :"tool_calls.function.arguments.delta" + event.name # "get_weather" + event.index # 0 (tool call index in array) + event.arguments_delta # '{"location": "San' + event.arguments # '{"location": "San Francisco"' + event.parsed # {location: "San Francisco"} (if strict: true) +``` + +#### `ChatFunctionToolCallArgumentsDoneEvent` + +The event is fired when a function tool call's arguments are complete. 
+ +```ruby +when OpenAI::Streaming::ChatFunctionToolCallArgumentsDoneEvent + event.type # :"tool_calls.function.arguments.done" + event.name # "get_weather" + event.index # 0 + event.arguments # '{"location": "San Francisco", "unit": "celsius"}' + event.parsed # {location: "San Francisco", unit: "celsius"} (for strict tools) +``` + +#### `ChatLogprobsContentDeltaEvent` + +This event is yielded when logprobs are requested and content tokens are being generated. + +```ruby +when OpenAI::Streaming::ChatLogprobsContentDeltaEvent + event.type # :"logprobs.content.delta" + event.content # Array of ChatCompletionTokenLogprob objects for new tokens + event.snapshot # Array of all logprobs accumulated so far +``` + +#### `ChatLogprobsContentDoneEvent` + +This event is fired when content generation is complete and logprobs were requested. + +```ruby +when OpenAI::Streaming::ChatLogprobsContentDoneEvent + event.type # :"logprobs.content.done" + event.content # Complete array of ChatCompletionTokenLogprob objects +``` + +#### `ChatLogprobsRefusalDeltaEvent` + +This event is yielded when logprobs are requested and refusal tokens are being generated. + +```ruby +when OpenAI::Streaming::ChatLogprobsRefusalDeltaEvent + event.type # :"logprobs.refusal.delta" + event.refusal # Array of ChatCompletionTokenLogprob objects for refusal tokens + event.snapshot # Array of all refusal logprobs accumulated so far +``` + +#### `ChatLogprobsRefusalDoneEvent` + +This event is fired when refusal generation is complete and logprobs were requested. + +```ruby +when OpenAI::Streaming::ChatLogprobsRefusalDoneEvent + event.type # :"logprobs.refusal.done" + event.refusal # Complete array of ChatCompletionTokenLogprob objects for refusal +``` + +### Methods + +Public Methods on the ChatCompletionStream class: + +#### `.text` + +Returns an enumerable that yields the text deltas from the stream. 
+ +#### `.get_output_text` + +Blocks until the stream has been read to completion and returns all `content` deltas concatenated together. + +#### `.get_final_completion` + +Blocks until the stream has been read to completion and returns the accumulated `ParsedChatCompletion` object. + +#### `.current_completion_snapshot` + +Returns the current accumulated `ParsedChatCompletion` at any point during streaming. + +#### `.until_done` + +Blocks until the stream has been read to completion. + +#### `.close` + +Aborts the request. + +### Structured Outputs + +The Chat Completions API supports structured outputs via the `response_format` parameter: + +```ruby +class Haiku < OpenAI::BaseModel + field :first_line, String + field :second_line, String + field :third_line, String +end + +stream = client.chat.completions.stream( + model: "gpt-4", + messages: [{role: "user", content: "Write a haiku about Ruby"}], + response_format: Haiku +) + +stream.each do |event| + case event + when OpenAI::Streaming::ChatContentDoneEvent + haiku = event.parsed + puts("First line: #{haiku.first_line}") + puts("Second line: #{haiku.second_line}") + puts("Third line: #{haiku.third_line}") + end +end +``` + +When `response_format` is provided: +- The model is instructed to output valid JSON matching your schema +- Content is automatically parsed into instances of your type +- Parsed objects are available via `event.parsed` on content done events diff --git a/lib/openai.rb b/lib/openai.rb index a0018aa1..5c6a0daa 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -195,6 +195,7 @@ require_relative "openai/models/chat/chat_completion_audio" require_relative "openai/models/chat/chat_completion_audio_param" require_relative "openai/models/chat/chat_completion_chunk" +require_relative "openai/models/chat/parsed_chat_completion" require_relative "openai/models/chat/chat_completion_content_part" require_relative "openai/models/chat/chat_completion_content_part_image" require_relative 
"openai/models/chat/chat_completion_content_part_input_audio" @@ -697,6 +698,9 @@ require_relative "openai/resources/vector_stores/file_batches" require_relative "openai/resources/vector_stores/files" require_relative "openai/resources/webhooks" -require_relative "openai/helpers/streaming/events" +require_relative "openai/helpers/streaming/response_events" require_relative "openai/helpers/streaming/response_stream" +require_relative "openai/helpers/streaming/exceptions" +require_relative "openai/helpers/streaming/chat_events" +require_relative "openai/helpers/streaming/chat_completion_stream" require_relative "openai/streaming" diff --git a/lib/openai/helpers/streaming/chat_completion_stream.rb b/lib/openai/helpers/streaming/chat_completion_stream.rb new file mode 100644 index 00000000..69df1b0c --- /dev/null +++ b/lib/openai/helpers/streaming/chat_completion_stream.rb @@ -0,0 +1,683 @@ +# frozen_string_literal: true + +module OpenAI + module Helpers + module Streaming + class ChatCompletionStream + include OpenAI::Internal::Type::BaseStream + + def initialize(raw_stream:, response_format: nil, input_tools: nil) + @raw_stream = raw_stream + @state = ChatCompletionStreamState.new( + response_format: response_format, + input_tools: input_tools + ) + @iterator = iterator + end + + def get_final_completion + until_done + @state.get_final_completion + end + + def get_output_text + completion = get_final_completion + text_parts = [] + + completion.choices.each do |choice| + next unless choice.message.content + text_parts << choice.message.content + end + + text_parts.join + end + + def until_done + each {} # rubocop:disable Lint/EmptyBlock + self + end + + def current_completion_snapshot + @state.current_completion_snapshot + end + + def text + OpenAI::Internal::Util.chain_fused(@iterator) do |yielder| + @iterator.each do |event| + yielder << event.delta if event.is_a?(ChatContentDeltaEvent) + end + end + end + + private + + def iterator + @iterator ||= 
OpenAI::Internal::Util.chain_fused(@raw_stream) do |y| + @raw_stream.each do |raw_event| + next unless valid_chat_completion_chunk?(raw_event) + @state.handle_chunk(raw_event).each do |event| + y << event + end + end + end + end + + def valid_chat_completion_chunk?(sse_event) + # Although the _raw_stream is always supposed to contain only objects adhering to ChatCompletionChunk schema, + # this is broken by the Azure OpenAI in case of Asynchronous Filter enabled. + # An easy filter is to check for the "object" property: + # - should be "chat.completion.chunk" for a ChatCompletionChunk; + # - is an empty string for Asynchronous Filter events. + sse_event.object == :"chat.completion.chunk" + end + end + + class ChatCompletionStreamState + attr_reader :current_completion_snapshot + + def initialize(response_format: nil, input_tools: nil) + @current_completion_snapshot = nil + @choice_event_states = [] + @input_tools = Array(input_tools) + @response_format = response_format + @rich_response_format = response_format.is_a?(Class) ? response_format : nil + end + + def get_final_completion + parse_chat_completion( + chat_completion: current_completion_snapshot, + response_format: @rich_response_format + ) + end + + # Transforms raw streaming chunks into higher-level events that represent content changes, + # tool calls, and completion states. It maintains a running snapshot of the complete + # response by accumulating data from each chunk. + # + # The method performs the following steps: + # 1. Unwraps the chunk if it's wrapped in a ChatChunkEvent + # 2. Filters out non-ChatCompletionChunk objects + # 3. Accumulates the chunk data into the current completion snapshot + # 4. 
Generates appropriate events based on the chunk's content + def handle_chunk(chunk) + chunk = chunk.chunk if chunk.is_a?(ChatChunkEvent) + + return [] unless chunk.is_a?(OpenAI::Chat::ChatCompletionChunk) + + @current_completion_snapshot = accumulate_chunk(chunk) + build_events(chunk: chunk, completion_snapshot: @current_completion_snapshot) + end + + private + + def get_choice_state(choice) + index = choice.index + @choice_event_states[index] ||= ChoiceEventState.new(input_tools: @input_tools) + end + + def accumulate_chunk(chunk) + if @current_completion_snapshot.nil? + return convert_initial_chunk_into_snapshot(chunk) + end + + completion_snapshot = @current_completion_snapshot + + chunk.choices.each do |choice| + accumulate_choice!(choice, completion_snapshot) + end + + completion_snapshot.usage = chunk.usage if chunk.usage + completion_snapshot.system_fingerprint = chunk.system_fingerprint if chunk.system_fingerprint + + completion_snapshot + end + + def accumulate_choice!(choice, completion_snapshot) + choice_snapshot = completion_snapshot.choices[choice.index] + + if choice_snapshot.nil? 
+ choice_snapshot = create_new_choice_snapshot(choice) + completion_snapshot.choices[choice.index] = choice_snapshot + else + update_existing_choice_snapshot(choice, choice_snapshot) + end + + if choice.finish_reason + choice_snapshot.finish_reason = choice.finish_reason + handle_finish_reason(choice.finish_reason) + end + + parse_tool_calls!(choice.delta.tool_calls, choice_snapshot.message.tool_calls) + + accumulate_logprobs!(choice.logprobs, choice_snapshot) + end + + def create_new_choice_snapshot(choice) + OpenAI::Internal::Type::Converter.coerce( + OpenAI::Models::Chat::ParsedChoice, + choice.to_h.except(:delta).merge(message: choice.delta.to_h) + ) + end + + def update_existing_choice_snapshot(choice, choice_snapshot) + delta_data = model_dump(choice.delta) + message_hash = model_dump(choice_snapshot.message) + + accumulated_data = accumulate_delta(message_hash, delta_data) + + choice_snapshot.message = OpenAI::Internal::Type::Converter.coerce( + OpenAI::Chat::ChatCompletionMessage, + accumulated_data + ) + end + + def build_events(chunk:, completion_snapshot:) + chunk_event = ChatChunkEvent.new( + type: :chunk, + chunk: chunk, + snapshot: completion_snapshot + ) + + choice_events = chunk.choices.flat_map do |choice| + build_choice_events(choice, completion_snapshot) + end + + [chunk_event] + choice_events + end + + def build_choice_events(choice, completion_snapshot) + choice_state = get_choice_state(choice) + choice_snapshot = completion_snapshot.choices[choice.index] + + content_delta_events(choice, choice_snapshot) + + tool_call_delta_events(choice, choice_snapshot) + + logprobs_delta_events(choice, choice_snapshot) + + choice_state.get_done_events( + choice_chunk: choice, + choice_snapshot: choice_snapshot, + response_format: @response_format + ) + end + + def content_delta_events(choice, choice_snapshot) + events = [] + + if choice.delta.content && choice_snapshot.message.content + events << ChatContentDeltaEvent.new( + type: :"content.delta", + delta: 
choice.delta.content, + snapshot: choice_snapshot.message.content, + parsed: choice_snapshot.message.parsed + ) + end + + if choice.delta.refusal && choice_snapshot.message.refusal + events << ChatRefusalDeltaEvent.new( + type: :"refusal.delta", + delta: choice.delta.refusal, + snapshot: choice_snapshot.message.refusal + ) + end + + events + end + + def tool_call_delta_events(choice, choice_snapshot) + events = [] + return events unless choice.delta.tool_calls + + tool_calls = choice_snapshot.message.tool_calls + return events unless tool_calls + + choice.delta.tool_calls.each do |tool_call_delta| + tool_call = tool_calls[tool_call_delta.index] + next unless tool_call.type == :function && tool_call_delta.function + + parsed_args = if tool_call.function.respond_to?(:parsed) + tool_call.function.parsed + end + events << ChatFunctionToolCallArgumentsDeltaEvent.new( + type: :"tool_calls.function.arguments.delta", + name: tool_call.function.name, + index: tool_call_delta.index, + arguments: tool_call.function.arguments, + parsed: parsed_args, + arguments_delta: tool_call_delta.function.arguments || "" + ) + end + + events + end + + def logprobs_delta_events(choice, choice_snapshot) + events = [] + return events unless choice.logprobs && choice_snapshot.logprobs + + if choice.logprobs.content && choice_snapshot.logprobs.content + events << ChatLogprobsContentDeltaEvent.new( + type: :"logprobs.content.delta", + content: choice.logprobs.content, + snapshot: choice_snapshot.logprobs.content + ) + end + + if choice.logprobs.refusal && choice_snapshot.logprobs.refusal + events << ChatLogprobsRefusalDeltaEvent.new( + type: :"logprobs.refusal.delta", + refusal: choice.logprobs.refusal, + snapshot: choice_snapshot.logprobs.refusal + ) + end + + events + end + + def handle_finish_reason(finish_reason) + return unless parseable_input? 
+ + case finish_reason + when :length + raise LengthFinishReasonError.new(completion: @chat_completion) + when :content_filter + raise ContentFilterFinishReasonError.new + end + end + + def parse_tool_calls!(delta_tool_calls, snapshot_tool_calls) + return unless delta_tool_calls && snapshot_tool_calls + + delta_tool_calls.each do |tool_call_chunk| + tool_call_snapshot = snapshot_tool_calls[tool_call_chunk.index] + next unless tool_call_snapshot&.type == :function + + input_tool = find_input_tool(tool_call_snapshot.function.name) + next unless input_tool&.dig(:function, :strict) + next unless tool_call_snapshot.function.arguments + + begin + tool_call_snapshot.function.parsed = JSON.parse( + tool_call_snapshot.function.arguments, + symbolize_names: true + ) + rescue JSON::ParserError + nil + end + end + end + + def accumulate_logprobs!(choice_logprobs, choice_snapshot) + return unless choice_logprobs + + if choice_snapshot.logprobs.nil? + choice_snapshot.logprobs = OpenAI::Chat::ChatCompletionChunk::Choice::Logprobs.new( + content: choice_logprobs.content, + refusal: choice_logprobs.refusal + ) + else + if choice_logprobs.content + choice_snapshot.logprobs.content ||= [] + choice_snapshot.logprobs.content.concat(choice_logprobs.content) + end + + if choice_logprobs.refusal + choice_snapshot.logprobs.refusal ||= [] + choice_snapshot.logprobs.refusal.concat(choice_logprobs.refusal) + end + end + end + + def parse_chat_completion(chat_completion:, response_format:) + choices = chat_completion.choices.map do |choice| + if parseable_input? 
+ case choice.finish_reason + when :length + raise LengthFinishReasonError.new(completion: chat_completion) + when :content_filter + raise ContentFilterFinishReasonError.new + end + end + + build_parsed_choice(choice, response_format) + end + + OpenAI::Internal::Type::Converter.coerce( + OpenAI::Chat::ParsedChatCompletion, + chat_completion.to_h.merge(choices: choices) + ) + end + + def build_parsed_choice(choice, response_format) + message = choice.message + + tool_calls = parse_choice_tool_calls(message.tool_calls) + + choice_data = model_dump(choice) + choice_data[:message] = model_dump(message) + choice_data[:message][:tool_calls] = tool_calls && !tool_calls.empty? ? tool_calls : nil + + if response_format && message.content && !message.refusal + choice_data[:message][:parsed] = parse_content(response_format, message) + end + + choice_data + end + + def parse_choice_tool_calls(tool_calls) + return unless tool_calls + + tool_calls.map do |tool_call| + tool_call_hash = model_dump(tool_call) + next tool_call_hash unless tool_call_hash[:type] == :function && tool_call_hash[:function] + + function = tool_call_hash[:function] + parsed_args = parse_function_tool_arguments(function) + function[:parsed] = parsed_args if parsed_args + + tool_call_hash + end + end + + def parseable_input? + @response_format || @input_tools.any? 
+ end + + def model_dump(obj) + if obj.is_a?(OpenAI::Internal::Type::BaseModel) + obj.deep_to_h + elsif obj.respond_to?(:to_h) + obj.to_h + else + obj + end + end + + def find_input_tool(name) + @input_tools.find { |tool| tool.dig(:function, :name) == name } + end + + def parse_function_tool_arguments(function) + return nil unless function[:arguments] + + input_tool = find_input_tool(function[:name]) + return nil unless input_tool&.dig(:function, :strict) + + parsed = JSON.parse(function[:arguments], symbolize_names: true) + return nil unless parsed + + model_class = input_tool[:model] || input_tool.dig(:function, :parameters) + if model_class.is_a?(Class) + OpenAI::Internal::Type::Converter.coerce(model_class, parsed) + else + parsed + end + rescue JSON::ParserError + nil + end + + def parse_content(response_format, message) + return nil unless message.content && !message.refusal + + parsed = JSON.parse(message.content, symbolize_names: true) + return nil unless parsed + + if response_format.is_a?(Class) + OpenAI::Internal::Type::Converter.coerce(response_format, parsed) + else + parsed + end + rescue JSON::ParserError + nil + end + + def convert_initial_chunk_into_snapshot(chunk) + data = chunk.to_h + + choices = [] + chunk.choices.each do |choice| + choice_hash = choice.to_h + delta_hash = choice.delta.to_h + + message_data = delta_hash.dup + message_data[:role] ||= :assistant + + choice_data = { + index: choice_hash[:index], + message: message_data, + finish_reason: choice_hash[:finish_reason], + logprobs: choice_hash[:logprobs] + } + choices << choice_data + end + + OpenAI::Internal::Type::Converter.coerce( + OpenAI::Chat::ParsedChatCompletion, + { + id: data[:id], + object: :"chat.completion", + created: data[:created], + model: data[:model], + choices: choices, + usage: data[:usage], + system_fingerprint: nil, + service_tier: data[:service_tier] + } + ) + end + + def accumulate_delta(acc, delta) + return acc if delta.nil? 
+ + delta.each do |key, delta_value| # rubocop:disable Metrics/BlockLength + key = key.to_sym if key.is_a?(String) + + unless acc.key?(key) + acc[key] = delta_value + next + end + + acc_value = acc[key] + if acc_value.nil? + acc[key] = delta_value + next + end + + # Special properties that should be replaced, not accumulated. + if [:index, :type, :parsed].include?(key) + acc[key] = delta_value + next + end + + if acc_value.is_a?(String) && delta_value.is_a?(String) + acc[key] = acc_value + delta_value + elsif acc_value.is_a?(Numeric) && delta_value.is_a?(Numeric) # rubocop:disable Lint/DuplicateBranch + acc[key] = acc_value + delta_value + elsif acc_value.is_a?(Hash) && delta_value.is_a?(Hash) + acc[key] = accumulate_delta(acc_value, delta_value) + elsif acc_value.is_a?(Array) && delta_value.is_a?(Array) + if acc_value.all? { |x| x.is_a?(String) || x.is_a?(Numeric) } + acc_value.concat(delta_value) + next + end + + delta_value.each do |delta_entry| + unless delta_entry.is_a?(Hash) + raise TypeError, + "Unexpected list delta entry is not a hash: #{delta_entry}" + end + + index = delta_entry[:index] || delta_entry["index"] + if index.nil? + raise RuntimeError, + "Expected list delta entry to have an `index` key; #{delta_entry}" + end + unless index.is_a?(Integer) + raise TypeError, + "Unexpected, list delta entry `index` value is not an integer; #{index}" + end + + if acc_value[index].nil? 
+ acc_value[index] = delta_entry + elsif acc_value[index].is_a?(Hash) + acc_value[index] = accumulate_delta(acc_value[index], delta_entry) + end + end + else + acc[key] = acc_value + end + end + + acc + end + end + + class ChoiceEventState + def initialize(input_tools:) + @input_tools = Array(input_tools) + @content_done = false + @refusal_done = false + @logprobs_content_done = false + @logprobs_refusal_done = false + @done_tool_calls = Set.new + @current_tool_call_index = nil + end + + def get_done_events(choice_chunk:, choice_snapshot:, response_format:) + events = [] + + if choice_snapshot.finish_reason + events.concat(content_done_events(choice_snapshot, response_format)) + + if @current_tool_call_index && !@done_tool_calls.include?(@current_tool_call_index) + event = tool_done_event(choice_snapshot, @current_tool_call_index) + events << event if event + end + end + + Array(choice_chunk.delta.tool_calls).each do |tool_call| + if @current_tool_call_index != tool_call.index + events.concat(content_done_events(choice_snapshot, response_format)) + + if @current_tool_call_index + event = tool_done_event(choice_snapshot, @current_tool_call_index) + events << event if event + end + end + + @current_tool_call_index = tool_call.index + end + + events + end + + private + + def content_done_events(choice_snapshot, response_format) + events = [] + + if choice_snapshot.message.content && !@content_done + @content_done = true + parsed = parse_content(choice_snapshot.message, response_format) + choice_snapshot.message.parsed = parsed + + events << ChatContentDoneEvent.new( + type: :"content.done", + content: choice_snapshot.message.content, + parsed: parsed + ) + end + + if choice_snapshot.message.refusal && !@refusal_done + @refusal_done = true + events << ChatRefusalDoneEvent.new( + type: :"refusal.done", + refusal: choice_snapshot.message.refusal + ) + end + + events + logprobs_done_events(choice_snapshot) + end + + def logprobs_done_events(choice_snapshot) + events = [] 
+ logprobs = choice_snapshot.logprobs + return events unless logprobs + + if logprobs.content&.any? && !@logprobs_content_done + @logprobs_content_done = true + events << ChatLogprobsContentDoneEvent.new( + type: :"logprobs.content.done", + content: logprobs.content + ) + end + + if logprobs.refusal&.any? && !@logprobs_refusal_done + @logprobs_refusal_done = true + events << ChatLogprobsRefusalDoneEvent.new( + type: :"logprobs.refusal.done", + refusal: logprobs.refusal + ) + end + + events + end + + def tool_done_event(choice_snapshot, tool_index) + return nil if @done_tool_calls.include?(tool_index) + + @done_tool_calls.add(tool_index) + + tool_call = choice_snapshot.message.tool_calls&.[](tool_index) + return nil unless tool_call&.type == :function + + parsed_args = parse_function_tool_arguments(tool_call.function) + + if tool_call.function.respond_to?(:parsed=) + tool_call.function.parsed = parsed_args + end + + ChatFunctionToolCallArgumentsDoneEvent.new( + type: :"tool_calls.function.arguments.done", + index: tool_index, + name: tool_call.function.name, + arguments: tool_call.function.arguments, + parsed: parsed_args + ) + end + + def parse_content(message, response_format) + return nil unless response_format && message.content + + parsed = JSON.parse(message.content, symbolize_names: true) + if response_format.is_a?(Class) + OpenAI::Internal::Type::Converter.coerce(response_format, parsed) + else + parsed + end + rescue JSON::ParserError + nil + end + + def parse_function_tool_arguments(function) + return nil unless function.arguments + + tool = find_input_tool(function.name) + return nil unless tool&.dig(:function, :strict) + + parsed = JSON.parse(function.arguments, symbolize_names: true) + + if tool[:model] + OpenAI::Internal::Type::Converter.coerce(tool[:model], parsed) + else + parsed + end + rescue JSON::ParserError + nil + end + + def find_input_tool(name) + @input_tools.find { |tool| tool.dig(:function, :name) == name } + end + end + end + end +end 
diff --git a/lib/openai/helpers/streaming/chat_events.rb b/lib/openai/helpers/streaming/chat_events.rb new file mode 100644 index 00000000..8d3e9581 --- /dev/null +++ b/lib/openai/helpers/streaming/chat_events.rb @@ -0,0 +1,181 @@ +# frozen_string_literal: true + +module OpenAI + module Helpers + module Streaming + # Raw streaming chunk event with accumulated completion snapshot. + # + # This is the fundamental event that wraps each raw chunk from the API + # along with the accumulated state up to that point. All other events + # are derived from processing these chunks. + # + # @example + # event.chunk # => ChatCompletionChunk (raw API response) + # event.snapshot # => ParsedChatCompletion (accumulated state) + class ChatChunkEvent < OpenAI::Internal::Type::BaseModel + required :type, const: :chunk + required :chunk, -> { OpenAI::Chat::ChatCompletionChunk } + required :snapshot, -> { OpenAI::Chat::ParsedChatCompletion } + end + + # Incremental text content update event. + # + # Emitted as the assistant's text response is being generated. Each event + # contains the new text fragment (delta) and the complete accumulated + # text so far (snapshot). + # + # @example + # event.delta # => "Hello" (new fragment) + # event.snapshot # => "Hello world" (accumulated text) + # event.parsed # => {name: "John"} (if using structured outputs) + class ChatContentDeltaEvent < OpenAI::Internal::Type::BaseModel + required :type, const: :"content.delta" + required :delta, String + required :snapshot, String + optional :parsed, Object # Partially parsed structured output + end + + # Text content completion event. + # + # Emitted when the assistant has finished generating text content. + # Contains the complete text and, if applicable, the fully parsed + # structured output. + # + # @example + # event.content # => "Hello world! How can I help?" 
+ # event.parsed # => {name: "John", age: 30} (if using structured outputs) + class ChatContentDoneEvent < OpenAI::Internal::Type::BaseModel + required :type, const: :"content.done" + required :content, String + optional :parsed, Object # Fully parsed structured output + end + + # Incremental refusal update event. + # + # Emitted when the assistant is refusing to fulfill a request. + # Contains the new refusal text fragment and accumulated refusal message. + # + # @example + # event.delta # => "I cannot" + # event.snapshot # => "I cannot help with that request" + class ChatRefusalDeltaEvent < OpenAI::Internal::Type::BaseModel + required :type, const: :"refusal.delta" + required :delta, String + required :snapshot, String + end + + # Refusal completion event. + # + # Emitted when the assistant has finished generating a refusal message. + # Contains the complete refusal text. + # + # @example + # event.refusal # => "I cannot help with that request as it violates..." + class ChatRefusalDoneEvent < OpenAI::Internal::Type::BaseModel + required :type, const: :"refusal.done" + required :refusal, String + end + + # Incremental function tool call arguments update. + # + # Emitted as function arguments are being streamed. Provides both the + # raw JSON fragments and incrementally parsed arguments for strict tools. + # + # @example + # event.name # => "get_weather" + # event.index # => 0 (tool call index in array) + # event.arguments_delta # => '{"location": "San' (new fragment) + # event.arguments # => '{"location": "San Francisco"' (accumulated JSON) + # event.parsed # => {location: "San Francisco"} (if strict: true) + class ChatFunctionToolCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseModel + required :type, const: :"tool_calls.function.arguments.delta" + required :name, String + required :index, Integer + required :arguments_delta, String + required :arguments, String + required :parsed, Object + end + + # Function tool call arguments completion event. 
+ # + # Emitted when a function tool call's arguments are complete. + # For tools defined with `strict: true`, the arguments will be fully + # parsed and validated. For non-strict tools, only raw JSON is available. + # + # @example With strict tool + # event.name # => "get_weather" + # event.arguments # => '{"location": "San Francisco", "unit": "celsius"}' + # event.parsed # => {location: "San Francisco", unit: "celsius"} + # + # @example Without strict tool + # event.parsed # => nil (parse JSON from event.arguments manually) + class ChatFunctionToolCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel + required :type, const: :"tool_calls.function.arguments.done" + required :name, String + required :index, Integer + required :arguments, String + required :parsed, Object # (only for strict: true tools) + end + + # Incremental logprobs update for content tokens. + # + # Emitted when logprobs are requested and content tokens are being generated. + # Contains log probability information for the new tokens and accumulated + # logprobs for all content tokens so far. + # + # @example + # event.content[0].token # => "Hello" + # event.content[0].logprob # => -0.31725305 + # event.content[0].top_logprobs # => [{token: "Hello", logprob: -0.31725305}, ...] + # event.snapshot # => [all logprobs accumulated so far] + class ChatLogprobsContentDeltaEvent < OpenAI::Internal::Type::BaseModel + required :type, const: :"logprobs.content.delta" + required :content, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] } + required :snapshot, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] } + end + + # Logprobs completion event for content tokens. + # + # Emitted when content generation is complete and logprobs were requested. + # Contains the complete array of log probabilities for all content tokens. 
+ # + # @example + # event.content.each do |logprob| + # puts "Token: #{logprob.token}, Logprob: #{logprob.logprob}" + # end + class ChatLogprobsContentDoneEvent < OpenAI::Internal::Type::BaseModel + required :type, const: :"logprobs.content.done" + required :content, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] } + end + + # Incremental logprobs update for refusal tokens. + # + # Emitted when logprobs are requested and refusal tokens are being generated. + # Contains log probability information for refusal message tokens. + # + # @example + # event.refusal[0].token # => "I" + # event.refusal[0].logprob # => -0.12345 + # event.snapshot # => [all refusal logprobs accumulated so far] + class ChatLogprobsRefusalDeltaEvent < OpenAI::Internal::Type::BaseModel + required :type, const: :"logprobs.refusal.delta" + required :refusal, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] } + required :snapshot, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] } + end + + # Logprobs completion event for refusal tokens. + # + # Emitted when refusal generation is complete and logprobs were requested. + # Contains the complete array of log probabilities for all refusal tokens. 
+ # + # @example + # event.refusal.each do |logprob| + # puts "Refusal token: #{logprob.token}, Logprob: #{logprob.logprob}" + # end + class ChatLogprobsRefusalDoneEvent < OpenAI::Internal::Type::BaseModel + required :type, const: :"logprobs.refusal.done" + required :refusal, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] } + end + end + end +end diff --git a/lib/openai/helpers/streaming/exceptions.rb b/lib/openai/helpers/streaming/exceptions.rb new file mode 100644 index 00000000..b4deb3a2 --- /dev/null +++ b/lib/openai/helpers/streaming/exceptions.rb @@ -0,0 +1,29 @@ +# frozen_string_literal: true + +module OpenAI + module Helpers + module Streaming + class StreamError < StandardError; end + + class LengthFinishReasonError < StreamError + attr_reader :completion + + def initialize(completion:) + @completion = completion + super("Stream finished due to length limit") + end + end + + class ContentFilterFinishReasonError < StreamError + def initialize + super("Stream finished due to content filter") + end + end + end + end +end + +module OpenAI + LengthFinishReasonError = Helpers::Streaming::LengthFinishReasonError + ContentFilterFinishReasonError = Helpers::Streaming::ContentFilterFinishReasonError +end diff --git a/lib/openai/helpers/streaming/events.rb b/lib/openai/helpers/streaming/response_events.rb similarity index 100% rename from lib/openai/helpers/streaming/events.rb rename to lib/openai/helpers/streaming/response_events.rb diff --git a/lib/openai/helpers/streaming/response_stream.rb b/lib/openai/helpers/streaming/response_stream.rb index 0aa3eb0f..c26d6a71 100644 --- a/lib/openai/helpers/streaming/response_stream.rb +++ b/lib/openai/helpers/streaming/response_stream.rb @@ -1,7 +1,5 @@ # frozen_string_literal: true -require_relative "events" - module OpenAI module Helpers module Streaming diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb index bc25753e..aea3e450 100644 --- a/lib/openai/internal/util.rb 
+++ b/lib/openai/internal/util.rb @@ -566,7 +566,8 @@ class << self # # @return [Array(String, Enumerable)] private def encode_multipart_streaming(body) - boundary = SecureRandom.urlsafe_base64(60) + # RFC 1521 Section 7.2.1 says we should have 70 char maximum for boundary length + boundary = SecureRandom.urlsafe_base64(46) closing = [] strio = writable_enum do |y| diff --git a/lib/openai/models/chat/parsed_chat_completion.rb b/lib/openai/models/chat/parsed_chat_completion.rb new file mode 100644 index 00000000..db581070 --- /dev/null +++ b/lib/openai/models/chat/parsed_chat_completion.rb @@ -0,0 +1,15 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Chat + class ParsedChoice < OpenAI::Models::Chat::ChatCompletion::Choice + optional :finish_reason, enum: -> { OpenAI::Chat::ChatCompletion::Choice::FinishReason }, nil?: true + end + + class ParsedChatCompletion < ChatCompletion + required :choices, -> { OpenAI::Internal::Type::ArrayOf[ParsedChoice] } + end + end + end +end diff --git a/lib/openai/resources/chat/completions.rb b/lib/openai/resources/chat/completions.rb index 0f69e130..33034b22 100644 --- a/lib/openai/resources/chat/completions.rb +++ b/lib/openai/resources/chat/completions.rb @@ -110,6 +110,54 @@ def create(params) raise ArgumentError.new(message) end + model, tool_models = get_structured_output_models(parsed) + + # rubocop:disable Metrics/BlockLength + unwrap = ->(raw) do + if model.is_a?(OpenAI::StructuredOutput::JsonSchemaConverter) + raw[:choices]&.each do |choice| + message = choice.fetch(:message) + begin + content = message.fetch(:content) + parsed = content.nil? ? 
nil : JSON.parse(content, symbolize_names: true) + rescue JSON::ParserError => e + parsed = e + end + coerced = OpenAI::Internal::Type::Converter.coerce(model, parsed) + message.store(:parsed, coerced) + end + end + raw[:choices]&.each do |choice| + choice.dig(:message, :tool_calls)&.each do |tool_call| + func = tool_call.fetch(:function) + next if (model = tool_models[func.fetch(:name)]).nil? + + begin + arguments = func.fetch(:arguments) + parsed = arguments.nil? ? nil : JSON.parse(arguments, symbolize_names: true) + rescue JSON::ParserError => e + parsed = e + end + coerced = OpenAI::Internal::Type::Converter.coerce(model, parsed) + func.store(:parsed, coerced) + end + end + + raw + end + # rubocop:enable Metrics/BlockLength + + @client.request( + method: :post, + path: "chat/completions", + body: parsed, + unwrap: unwrap, + model: OpenAI::Chat::ChatCompletion, + options: options + ) + end + + def get_structured_output_models(parsed) model = nil tool_models = {} case parsed @@ -162,53 +210,46 @@ def create(params) else end - # rubocop:disable Metrics/BlockLength - unwrap = ->(raw) do - if model.is_a?(OpenAI::StructuredOutput::JsonSchemaConverter) - raw[:choices]&.each do |choice| - message = choice.fetch(:message) - begin - content = message.fetch(:content) - parsed = content.nil? ? nil : JSON.parse(content, symbolize_names: true) - rescue JSON::ParserError => e - parsed = e - end - coerced = OpenAI::Internal::Type::Converter.coerce(model, parsed) - message.store(:parsed, coerced) - end - end - raw[:choices]&.each do |choice| - choice.dig(:message, :tool_calls)&.each do |tool_call| - func = tool_call.fetch(:function) - next if (model = tool_models[func.fetch(:name)]).nil? + [model, tool_models] + end - begin - arguments = func.fetch(:arguments) - parsed = arguments.nil? ? 
nil : JSON.parse(arguments, symbolize_names: true) - rescue JSON::ParserError => e - parsed = e - end - coerced = OpenAI::Internal::Type::Converter.coerce(model, parsed) - func.store(:parsed, coerced) - end - end + def build_tools_with_models(tools, tool_models) + return [] if tools.nil? - raw + tools.map do |tool| + next tool unless tool[:type] == :function + + function_name = tool.dig(:function, :name) + model = tool_models[function_name] + + model ? tool.merge(model: model) : tool end - # rubocop:enable Metrics/BlockLength + end - @client.request( + def stream(params) + parsed, options = OpenAI::Chat::CompletionCreateParams.dump_request(params) + + parsed.store(:stream, true) + + response_format, tool_models = get_structured_output_models(parsed) + + input_tools = build_tools_with_models(parsed[:tools], tool_models) + + raw_stream = @client.request( method: :post, path: "chat/completions", + headers: {"accept" => "text/event-stream"}, body: parsed, - unwrap: unwrap, - model: OpenAI::Chat::ChatCompletion, + stream: OpenAI::Internal::Stream, + model: OpenAI::Chat::ChatCompletionChunk, options: options ) - end - def stream - raise NotImplementedError.new("higher level helpers are coming soon!") + OpenAI::Helpers::Streaming::ChatCompletionStream.new( + raw_stream: raw_stream, + response_format: response_format, + input_tools: input_tools + ) end # See {OpenAI::Resources::Chat::Completions#create} for non-streaming counterpart. diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index ad7aa2d4..0f516293 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -85,7 +85,7 @@ class Responses def create(params = {}) parsed, options = OpenAI::Responses::ResponseCreateParams.dump_request(params) if parsed[:stream] - message = "Please use `#stream_raw` for the streaming use case." + message = "Please use `#stream` for the streaming use case." 
raise ArgumentError.new(message) end diff --git a/lib/openai/version.rb b/lib/openai/version.rb index fa1b7463..472922d8 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.26.0" + VERSION = "0.27.0" end diff --git a/rbi/openai/helpers/streaming/events.rbi b/rbi/openai/helpers/streaming/events.rbi index a084cb21..ecef6847 100644 --- a/rbi/openai/helpers/streaming/events.rbi +++ b/rbi/openai/helpers/streaming/events.rbi @@ -26,6 +26,126 @@ module OpenAI def response end end + + class ChatChunkEvent < OpenAI::Internal::Type::BaseModel + sig { returns(T.untyped) } + def chunk + end + + sig { returns(T.untyped) } + def snapshot + end + end + + class ChatContentDeltaEvent < OpenAI::Internal::Type::BaseModel + sig { returns(String) } + def delta + end + + sig { returns(String) } + def snapshot + end + + sig { returns(T.untyped) } + def parsed + end + end + + class ChatContentDoneEvent < OpenAI::Internal::Type::BaseModel + sig { returns(String) } + def content + end + + sig { returns(T.untyped) } + def parsed + end + end + + class ChatRefusalDeltaEvent < OpenAI::Internal::Type::BaseModel + sig { returns(String) } + def delta + end + + sig { returns(String) } + def snapshot + end + end + + class ChatRefusalDoneEvent < OpenAI::Internal::Type::BaseModel + sig { returns(String) } + def refusal + end + end + + class ChatFunctionToolCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseModel + sig { returns(String) } + def name + end + + sig { returns(Integer) } + def index + end + + sig { returns(String) } + def arguments_delta + end + + sig { returns(String) } + def arguments + end + + sig { returns(T.untyped) } + def parsed_arguments + end + end + + class ChatFunctionToolCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel + sig { returns(String) } + def name + end + + sig { returns(Integer) } + def index + end + + sig { returns(String) } + def arguments + end + + sig { 
returns(T.untyped) } + def parsed_arguments + end + end + + class ChatLogprobsContentDeltaEvent < OpenAI::Internal::Type::BaseModel + sig { returns(T.untyped) } + def content + end + end + + class ChatLogprobsContentDoneEvent < OpenAI::Internal::Type::BaseModel + sig { returns(T.untyped) } + def content + end + end + + class ChatLogprobsRefusalDeltaEvent < OpenAI::Internal::Type::BaseModel + sig { returns(T.untyped) } + def refusal + end + end + + class ChatLogprobsRefusalDoneEvent < OpenAI::Internal::Type::BaseModel + sig { returns(T.untyped) } + def refusal + end + end + + class ChatCompletionStream + sig { returns(T.untyped) } + def each + end + end end end end diff --git a/rbi/openai/streaming.rbi b/rbi/openai/streaming.rbi index c448d83c..37831c31 100644 --- a/rbi/openai/streaming.rbi +++ b/rbi/openai/streaming.rbi @@ -1,5 +1,32 @@ # typed: strong module OpenAI - Streaming = OpenAI::Helpers::Streaming + module Streaming + ResponseTextDeltaEvent = OpenAI::Helpers::Streaming::ResponseTextDeltaEvent + ResponseTextDoneEvent = OpenAI::Helpers::Streaming::ResponseTextDoneEvent + ResponseFunctionCallArgumentsDeltaEvent = + OpenAI::Helpers::Streaming::ResponseFunctionCallArgumentsDeltaEvent + ResponseCompletedEvent = OpenAI::Helpers::Streaming::ResponseCompletedEvent + + ChatChunkEvent = OpenAI::Helpers::Streaming::ChatChunkEvent + ChatContentDeltaEvent = OpenAI::Helpers::Streaming::ChatContentDeltaEvent + ChatContentDoneEvent = OpenAI::Helpers::Streaming::ChatContentDoneEvent + ChatRefusalDeltaEvent = OpenAI::Helpers::Streaming::ChatRefusalDeltaEvent + ChatRefusalDoneEvent = OpenAI::Helpers::Streaming::ChatRefusalDoneEvent + ChatFunctionToolCallArgumentsDeltaEvent = + OpenAI::Helpers::Streaming::ChatFunctionToolCallArgumentsDeltaEvent + ChatFunctionToolCallArgumentsDoneEvent = + OpenAI::Helpers::Streaming::ChatFunctionToolCallArgumentsDoneEvent + ChatLogprobsContentDeltaEvent = + OpenAI::Helpers::Streaming::ChatLogprobsContentDeltaEvent + ChatLogprobsContentDoneEvent 
= + OpenAI::Helpers::Streaming::ChatLogprobsContentDoneEvent + ChatLogprobsRefusalDeltaEvent = + OpenAI::Helpers::Streaming::ChatLogprobsRefusalDeltaEvent + ChatLogprobsRefusalDoneEvent = + OpenAI::Helpers::Streaming::ChatLogprobsRefusalDoneEvent + + ResponseStream = OpenAI::Helpers::Streaming::ResponseStream + ChatCompletionStream = OpenAI::Helpers::Streaming::ChatCompletionStream + end end diff --git a/scripts/fast-format b/scripts/fast-format new file mode 100755 index 00000000..6d5973fb --- /dev/null +++ b/scripts/fast-format @@ -0,0 +1,18 @@ +#!/usr/bin/env bash + +set -euo pipefail + +echo "Script started with $# arguments" +echo "Arguments: $*" +echo "Script location: $(dirname "$0")" + +cd -- "$(dirname "$0")/.." +echo "Changed to directory: $PWD" + +if [ $# -eq 0 ]; then + echo "Usage: $0 [additional-formatter-args...]" + echo "The file should contain one file path per line" + exit 1 +fi + +exec -- bundle exec rake format FORMAT_FILE="$1" diff --git a/test/openai/internal/util_test.rb b/test/openai/internal/util_test.rb index fc91db45..26bacf49 100644 --- a/test/openai/internal/util_test.rb +++ b/test/openai/internal/util_test.rb @@ -213,6 +213,18 @@ def env_table end end + def test_encoding_length + headers, = OpenAI::Internal::Util.encode_content( + {"content-type" => "multipart/form-data"}, + Pathname(__FILE__) + ) + assert_pattern do + headers.fetch("content-type") => /boundary=(.+)$/ + end + field, = Regexp.last_match.captures + assert(field.length < 70 - 6) + end + def test_file_encode file = Pathname(__FILE__) headers = {"content-type" => "multipart/form-data"} diff --git a/test/openai/resources/chat/completions/snapshots/173417d55340.txt b/test/openai/resources/chat/completions/snapshots/173417d55340.txt new file mode 100644 index 00000000..49c6dce9 --- /dev/null +++ b/test/openai/resources/chat/completions/snapshots/173417d55340.txt @@ -0,0 +1,28 @@ +data: 
{"id":"chatcmpl-ABfw4IfQfCCrcuybFm41wJyxjbkz7","object":"chat.completion.chunk","created":1727346172,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"role":"assistant","content":null,"refusal":""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw4IfQfCCrcuybFm41wJyxjbkz7","object":"chat.completion.chunk","created":1727346172,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"refusal":"I'm"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw4IfQfCCrcuybFm41wJyxjbkz7","object":"chat.completion.chunk","created":1727346172,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"refusal":" sorry"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw4IfQfCCrcuybFm41wJyxjbkz7","object":"chat.completion.chunk","created":1727346172,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"refusal":","},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw4IfQfCCrcuybFm41wJyxjbkz7","object":"chat.completion.chunk","created":1727346172,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"refusal":" I"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw4IfQfCCrcuybFm41wJyxjbkz7","object":"chat.completion.chunk","created":1727346172,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"refusal":" can't"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw4IfQfCCrcuybFm41wJyxjbkz7","object":"chat.completion.chunk","created":1727346172,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"refusal":" assist"},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfw4IfQfCCrcuybFm41wJyxjbkz7","object":"chat.completion.chunk","created":1727346172,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"refusal":" with"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw4IfQfCCrcuybFm41wJyxjbkz7","object":"chat.completion.chunk","created":1727346172,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"refusal":" that"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw4IfQfCCrcuybFm41wJyxjbkz7","object":"chat.completion.chunk","created":1727346172,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"refusal":" request"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw4IfQfCCrcuybFm41wJyxjbkz7","object":"chat.completion.chunk","created":1727346172,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"refusal":"."},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw4IfQfCCrcuybFm41wJyxjbkz7","object":"chat.completion.chunk","created":1727346172,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]} + +data: {"id":"chatcmpl-ABfw4IfQfCCrcuybFm41wJyxjbkz7","object":"chat.completion.chunk","created":1727346172,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[],"usage":{"prompt_tokens":79,"completion_tokens":11,"total_tokens":90,"completion_tokens_details":{"reasoning_tokens":0}}} + +data: [DONE] + diff --git a/test/openai/resources/chat/completions/snapshots/2018feb66ae1.txt b/test/openai/resources/chat/completions/snapshots/2018feb66ae1.txt new file mode 100644 index 00000000..87197067 --- /dev/null +++ b/test/openai/resources/chat/completions/snapshots/2018feb66ae1.txt @@ -0,0 +1,22 @@ +data: 
{"id":"chatcmpl-ABfwERreu9s99xXsVuOWtIB2UOx62","object":"chat.completion.chunk","created":1727346182,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_143bb8492c","choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"id":"call_4XzlGBLtUe9dy3GVNV4jhq7h","type":"function","function":{"name":"get_weather","arguments":""}}],"refusal":null},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwERreu9s99xXsVuOWtIB2UOx62","object":"chat.completion.chunk","created":1727346182,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_143bb8492c","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\""}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwERreu9s99xXsVuOWtIB2UOx62","object":"chat.completion.chunk","created":1727346182,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_143bb8492c","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"city"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwERreu9s99xXsVuOWtIB2UOx62","object":"chat.completion.chunk","created":1727346182,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_143bb8492c","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\":\""}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwERreu9s99xXsVuOWtIB2UOx62","object":"chat.completion.chunk","created":1727346182,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_143bb8492c","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"New"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwERreu9s99xXsVuOWtIB2UOx62","object":"chat.completion.chunk","created":1727346182,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_143bb8492c","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" York"}}]},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwERreu9s99xXsVuOWtIB2UOx62","object":"chat.completion.chunk","created":1727346182,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_143bb8492c","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" City"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwERreu9s99xXsVuOWtIB2UOx62","object":"chat.completion.chunk","created":1727346182,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_143bb8492c","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\"}"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwERreu9s99xXsVuOWtIB2UOx62","object":"chat.completion.chunk","created":1727346182,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_143bb8492c","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}]} + +data: {"id":"chatcmpl-ABfwERreu9s99xXsVuOWtIB2UOx62","object":"chat.completion.chunk","created":1727346182,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_143bb8492c","choices":[],"usage":{"prompt_tokens":44,"completion_tokens":16,"total_tokens":60,"completion_tokens_details":{"reasoning_tokens":0}}} + +data: [DONE] + diff --git a/test/openai/resources/chat/completions/snapshots/4cc50a6135d2.txt b/test/openai/resources/chat/completions/snapshots/4cc50a6135d2.txt new file mode 100644 index 00000000..c3392883 --- /dev/null +++ b/test/openai/resources/chat/completions/snapshots/4cc50a6135d2.txt @@ -0,0 +1,10 @@ +data: {"id":"chatcmpl-ABfw3Oqj8RD0z6aJiiX37oTjV2HFh","object":"chat.completion.chunk","created":1727346171,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_7568d46099","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfw3Oqj8RD0z6aJiiX37oTjV2HFh","object":"chat.completion.chunk","created":1727346171,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_7568d46099","choices":[{"index":0,"delta":{"content":"{\""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw3Oqj8RD0z6aJiiX37oTjV2HFh","object":"chat.completion.chunk","created":1727346171,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_7568d46099","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"length"}]} + +data: {"id":"chatcmpl-ABfw3Oqj8RD0z6aJiiX37oTjV2HFh","object":"chat.completion.chunk","created":1727346171,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_7568d46099","choices":[],"usage":{"prompt_tokens":79,"completion_tokens":1,"total_tokens":80,"completion_tokens_details":{"reasoning_tokens":0}}} + +data: [DONE] + diff --git a/test/openai/resources/chat/completions/snapshots/569c877e6942.txt b/test/openai/resources/chat/completions/snapshots/569c877e6942.txt new file mode 100644 index 00000000..47dd7315 --- /dev/null +++ b/test/openai/resources/chat/completions/snapshots/569c877e6942.txt @@ -0,0 +1,30 @@ +data: {"id":"chatcmpl-ABfw5GEVqPbLY576l46FZDQoNJ2KC","object":"chat.completion.chunk","created":1727346173,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"role":"assistant","content":null,"refusal":""},"logprobs":{"content":null,"refusal":[]},"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw5GEVqPbLY576l46FZDQoNJ2KC","object":"chat.completion.chunk","created":1727346173,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"refusal":"I'm"},"logprobs":{"content":null,"refusal":[{"token":"I'm","logprob":-0.0012038043,"bytes":[73,39,109],"top_logprobs":[]}]},"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfw5GEVqPbLY576l46FZDQoNJ2KC","object":"chat.completion.chunk","created":1727346173,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"refusal":" very"},"logprobs":{"content":null,"refusal":[{"token":" very","logprob":-0.8438816,"bytes":[32,118,101,114,121],"top_logprobs":[]}]},"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw5GEVqPbLY576l46FZDQoNJ2KC","object":"chat.completion.chunk","created":1727346173,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"refusal":" sorry"},"logprobs":{"content":null,"refusal":[{"token":" sorry","logprob":-3.4121115e-6,"bytes":[32,115,111,114,114,121],"top_logprobs":[]}]},"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw5GEVqPbLY576l46FZDQoNJ2KC","object":"chat.completion.chunk","created":1727346173,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"refusal":","},"logprobs":{"content":null,"refusal":[{"token":",","logprob":-0.000033809047,"bytes":[44],"top_logprobs":[]}]},"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw5GEVqPbLY576l46FZDQoNJ2KC","object":"chat.completion.chunk","created":1727346173,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"refusal":" but"},"logprobs":{"content":null,"refusal":[{"token":" but","logprob":-0.038048144,"bytes":[32,98,117,116],"top_logprobs":[]}]},"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw5GEVqPbLY576l46FZDQoNJ2KC","object":"chat.completion.chunk","created":1727346173,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"refusal":" I"},"logprobs":{"content":null,"refusal":[{"token":" I","logprob":-0.0016109125,"bytes":[32,73],"top_logprobs":[]}]},"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfw5GEVqPbLY576l46FZDQoNJ2KC","object":"chat.completion.chunk","created":1727346173,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"refusal":" can't"},"logprobs":{"content":null,"refusal":[{"token":" can't","logprob":-0.0073532974,"bytes":[32,99,97,110,39,116],"top_logprobs":[]}]},"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw5GEVqPbLY576l46FZDQoNJ2KC","object":"chat.completion.chunk","created":1727346173,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"refusal":" assist"},"logprobs":{"content":null,"refusal":[{"token":" assist","logprob":-0.0020837625,"bytes":[32,97,115,115,105,115,116],"top_logprobs":[]}]},"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw5GEVqPbLY576l46FZDQoNJ2KC","object":"chat.completion.chunk","created":1727346173,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"refusal":" with"},"logprobs":{"content":null,"refusal":[{"token":" with","logprob":-0.00318354,"bytes":[32,119,105,116,104],"top_logprobs":[]}]},"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw5GEVqPbLY576l46FZDQoNJ2KC","object":"chat.completion.chunk","created":1727346173,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"refusal":" that"},"logprobs":{"content":null,"refusal":[{"token":" that","logprob":-0.0017186158,"bytes":[32,116,104,97,116],"top_logprobs":[]}]},"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw5GEVqPbLY576l46FZDQoNJ2KC","object":"chat.completion.chunk","created":1727346173,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"refusal":"."},"logprobs":{"content":null,"refusal":[{"token":".","logprob":-0.57687104,"bytes":[46],"top_logprobs":[]}]},"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfw5GEVqPbLY576l46FZDQoNJ2KC","object":"chat.completion.chunk","created":1727346173,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]} + +data: {"id":"chatcmpl-ABfw5GEVqPbLY576l46FZDQoNJ2KC","object":"chat.completion.chunk","created":1727346173,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[],"usage":{"prompt_tokens":79,"completion_tokens":12,"total_tokens":91,"completion_tokens_details":{"reasoning_tokens":0}}} + +data: [DONE] + diff --git a/test/openai/resources/chat/completions/snapshots/7e5ea4d12e7c.txt b/test/openai/resources/chat/completions/snapshots/7e5ea4d12e7c.txt new file mode 100644 index 00000000..801db2ad --- /dev/null +++ b/test/openai/resources/chat/completions/snapshots/7e5ea4d12e7c.txt @@ -0,0 +1,36 @@ +data: {"id":"chatcmpl-ABfw1e5abtU8OwGr15vOreYVb2MiF","object":"chat.completion.chunk","created":1727346169,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw1e5abtU8OwGr15vOreYVb2MiF","object":"chat.completion.chunk","created":1727346169,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"{\""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw1e5abtU8OwGr15vOreYVb2MiF","object":"chat.completion.chunk","created":1727346169,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"city"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw1e5abtU8OwGr15vOreYVb2MiF","object":"chat.completion.chunk","created":1727346169,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\":\""},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfw1e5abtU8OwGr15vOreYVb2MiF","object":"chat.completion.chunk","created":1727346169,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"San"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw1e5abtU8OwGr15vOreYVb2MiF","object":"chat.completion.chunk","created":1727346169,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" Francisco"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw1e5abtU8OwGr15vOreYVb2MiF","object":"chat.completion.chunk","created":1727346169,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\",\""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw1e5abtU8OwGr15vOreYVb2MiF","object":"chat.completion.chunk","created":1727346169,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"temperature"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw1e5abtU8OwGr15vOreYVb2MiF","object":"chat.completion.chunk","created":1727346169,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw1e5abtU8OwGr15vOreYVb2MiF","object":"chat.completion.chunk","created":1727346169,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"61"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw1e5abtU8OwGr15vOreYVb2MiF","object":"chat.completion.chunk","created":1727346169,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":",\""},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfw1e5abtU8OwGr15vOreYVb2MiF","object":"chat.completion.chunk","created":1727346169,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"units"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw1e5abtU8OwGr15vOreYVb2MiF","object":"chat.completion.chunk","created":1727346169,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\":\""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw1e5abtU8OwGr15vOreYVb2MiF","object":"chat.completion.chunk","created":1727346169,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"f"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw1e5abtU8OwGr15vOreYVb2MiF","object":"chat.completion.chunk","created":1727346169,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\"}"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw1e5abtU8OwGr15vOreYVb2MiF","object":"chat.completion.chunk","created":1727346169,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]} + +data: {"id":"chatcmpl-ABfw1e5abtU8OwGr15vOreYVb2MiF","object":"chat.completion.chunk","created":1727346169,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[],"usage":{"prompt_tokens":79,"completion_tokens":14,"total_tokens":93,"completion_tokens_details":{"reasoning_tokens":0}}} + +data: [DONE] + diff --git a/test/openai/resources/chat/completions/snapshots/83b060bae42e.txt b/test/openai/resources/chat/completions/snapshots/83b060bae42e.txt new file mode 100644 index 00000000..e9f34b63 --- /dev/null +++ b/test/openai/resources/chat/completions/snapshots/83b060bae42e.txt @@ -0,0 +1,12 @@ +data: 
{"id":"chatcmpl-ABfw5EzoqmfXjnnsXY7Yd8OC6tb3c","object":"chat.completion.chunk","created":1727346173,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":{"content":[],"refusal":null},"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw5EzoqmfXjnnsXY7Yd8OC6tb3c","object":"chat.completion.chunk","created":1727346173,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"Foo"},"logprobs":{"content":[{"token":"Foo","logprob":-0.0025094282,"bytes":[70,111,111],"top_logprobs":[]}],"refusal":null},"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw5EzoqmfXjnnsXY7Yd8OC6tb3c","object":"chat.completion.chunk","created":1727346173,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"!"},"logprobs":{"content":[{"token":"!","logprob":-0.26638845,"bytes":[33],"top_logprobs":[]}],"refusal":null},"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw5EzoqmfXjnnsXY7Yd8OC6tb3c","object":"chat.completion.chunk","created":1727346173,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]} + +data: {"id":"chatcmpl-ABfw5EzoqmfXjnnsXY7Yd8OC6tb3c","object":"chat.completion.chunk","created":1727346173,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[],"usage":{"prompt_tokens":9,"completion_tokens":2,"total_tokens":11,"completion_tokens_details":{"reasoning_tokens":0}}} + +data: [DONE] + diff --git a/test/openai/resources/chat/completions/snapshots/a247c49c5fcd.txt b/test/openai/resources/chat/completions/snapshots/a247c49c5fcd.txt new file mode 100644 index 00000000..b44d334a --- /dev/null +++ b/test/openai/resources/chat/completions/snapshots/a247c49c5fcd.txt @@ -0,0 +1,28 @@ +data: 
{"id":"chatcmpl-ABfwCgi41eStOcARjZq97ohCEGBPO","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"id":"call_CTf1nWJLqSeRgDqaCG27xZ74","type":"function","function":{"name":"get_weather","arguments":""}}],"refusal":null},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCgi41eStOcARjZq97ohCEGBPO","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\""}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCgi41eStOcARjZq97ohCEGBPO","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"city"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCgi41eStOcARjZq97ohCEGBPO","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\":\""}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCgi41eStOcARjZq97ohCEGBPO","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"San"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCgi41eStOcARjZq97ohCEGBPO","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" Francisco"}}]},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCgi41eStOcARjZq97ohCEGBPO","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\",\""}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCgi41eStOcARjZq97ohCEGBPO","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"state"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCgi41eStOcARjZq97ohCEGBPO","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\":\""}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCgi41eStOcARjZq97ohCEGBPO","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"CA"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCgi41eStOcARjZq97ohCEGBPO","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\"}"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCgi41eStOcARjZq97ohCEGBPO","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}]} + +data: 
{"id":"chatcmpl-ABfwCgi41eStOcARjZq97ohCEGBPO","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[],"usage":{"prompt_tokens":48,"completion_tokens":19,"total_tokens":67,"completion_tokens_details":{"reasoning_tokens":0}}} + +data: [DONE] + diff --git a/test/openai/resources/chat/completions/snapshots/a491adda08c3.txt b/test/openai/resources/chat/completions/snapshots/a491adda08c3.txt new file mode 100644 index 00000000..160e65de --- /dev/null +++ b/test/openai/resources/chat/completions/snapshots/a491adda08c3.txt @@ -0,0 +1,100 @@ +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"content":"{\""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":1,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":1,"delta":{"content":"{\""},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":2,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":2,"delta":{"content":"{\""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"content":"city"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":1,"delta":{"content":"city"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":2,"delta":{"content":"city"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"content":"\":\""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":1,"delta":{"content":"\":\""},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":2,"delta":{"content":"\":\""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"content":"San"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":1,"delta":{"content":"San"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":2,"delta":{"content":"San"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"content":" Francisco"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":1,"delta":{"content":" Francisco"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":2,"delta":{"content":" Francisco"},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"content":"\",\""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":1,"delta":{"content":"\",\""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":2,"delta":{"content":"\",\""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"content":"temperature"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":1,"delta":{"content":"temperature"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":1,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":2,"delta":{"content":"temperature"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":2,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"content":"65"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"content":",\""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":1,"delta":{"content":"61"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":1,"delta":{"content":",\""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":2,"delta":{"content":"59"},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":2,"delta":{"content":",\""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"content":"units"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":1,"delta":{"content":"units"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":2,"delta":{"content":"units"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"content":"\":\""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":1,"delta":{"content":"\":\""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":2,"delta":{"content":"\":\""},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"content":"f"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":1,"delta":{"content":"f"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":2,"delta":{"content":"f"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{"content":"\"}"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":1,"delta":{"content":"\"}"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":2,"delta":{"content":"\"}"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]} + +data: 
{"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":1,"delta":{},"logprobs":null,"finish_reason":"stop"}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[{"index":2,"delta":{},"logprobs":null,"finish_reason":"stop"}]} + +data: {"id":"chatcmpl-ABfw2KKFuVXmEJgVwYfBvejMAdWtq","object":"chat.completion.chunk","created":1727346170,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_b40fb1c6fb","choices":[],"usage":{"prompt_tokens":79,"completion_tokens":42,"total_tokens":121,"completion_tokens_details":{"reasoning_tokens":0}}} + +data: [DONE] + diff --git a/test/openai/resources/chat/completions/snapshots/c6aa7e397b71.txt b/test/openai/resources/chat/completions/snapshots/c6aa7e397b71.txt new file mode 100644 index 00000000..f20333fb --- /dev/null +++ b/test/openai/resources/chat/completions/snapshots/c6aa7e397b71.txt @@ -0,0 +1,36 @@ +data: {"id":"chatcmpl-ABfw8AOXnoa2kzy11vVTSjuQhHCQr","object":"chat.completion.chunk","created":1727346176,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_7568d46099","choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"id":"call_c91SqDXlYFuETYv8mUHzz6pp","type":"function","function":{"name":"GetWeatherArgs","arguments":""}}],"refusal":null},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw8AOXnoa2kzy11vVTSjuQhHCQr","object":"chat.completion.chunk","created":1727346176,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_7568d46099","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\""}}]},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfw8AOXnoa2kzy11vVTSjuQhHCQr","object":"chat.completion.chunk","created":1727346176,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_7568d46099","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"city"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw8AOXnoa2kzy11vVTSjuQhHCQr","object":"chat.completion.chunk","created":1727346176,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_7568d46099","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\":\""}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw8AOXnoa2kzy11vVTSjuQhHCQr","object":"chat.completion.chunk","created":1727346176,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_7568d46099","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"Ed"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw8AOXnoa2kzy11vVTSjuQhHCQr","object":"chat.completion.chunk","created":1727346176,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_7568d46099","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"inburgh"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw8AOXnoa2kzy11vVTSjuQhHCQr","object":"chat.completion.chunk","created":1727346176,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_7568d46099","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\",\""}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw8AOXnoa2kzy11vVTSjuQhHCQr","object":"chat.completion.chunk","created":1727346176,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_7568d46099","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"country"}}]},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfw8AOXnoa2kzy11vVTSjuQhHCQr","object":"chat.completion.chunk","created":1727346176,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_7568d46099","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\":\""}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw8AOXnoa2kzy11vVTSjuQhHCQr","object":"chat.completion.chunk","created":1727346176,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_7568d46099","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"UK"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw8AOXnoa2kzy11vVTSjuQhHCQr","object":"chat.completion.chunk","created":1727346176,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_7568d46099","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\",\""}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw8AOXnoa2kzy11vVTSjuQhHCQr","object":"chat.completion.chunk","created":1727346176,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_7568d46099","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"units"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw8AOXnoa2kzy11vVTSjuQhHCQr","object":"chat.completion.chunk","created":1727346176,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_7568d46099","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\":\""}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw8AOXnoa2kzy11vVTSjuQhHCQr","object":"chat.completion.chunk","created":1727346176,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_7568d46099","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"c"}}]},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfw8AOXnoa2kzy11vVTSjuQhHCQr","object":"chat.completion.chunk","created":1727346176,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_7568d46099","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\"}"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw8AOXnoa2kzy11vVTSjuQhHCQr","object":"chat.completion.chunk","created":1727346176,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_7568d46099","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}]} + +data: {"id":"chatcmpl-ABfw8AOXnoa2kzy11vVTSjuQhHCQr","object":"chat.completion.chunk","created":1727346176,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_7568d46099","choices":[],"usage":{"prompt_tokens":76,"completion_tokens":24,"total_tokens":100,"completion_tokens_details":{"reasoning_tokens":0}}} + +data: [DONE] + diff --git a/test/openai/resources/chat/completions/snapshots/d61558011839.txt b/test/openai/resources/chat/completions/snapshots/d61558011839.txt new file mode 100644 index 00000000..aee8650c --- /dev/null +++ b/test/openai/resources/chat/completions/snapshots/d61558011839.txt @@ -0,0 +1,362 @@ +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + 
+data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" {\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"location"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"San"},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" Francisco"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" CA"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\",\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"weather"},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" {\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"temperature"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"18"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"°C"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\",\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"condition"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"Part"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"ly"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" Cloud"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"y"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\",\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"humidity"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"72"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"%\",\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"wind"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"Speed"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"15"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" km"},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"/h"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\",\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"wind"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"Direction"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"NW"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\"\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" },\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"forecast"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" [\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" {\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"day"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"Monday"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\",\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"high"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"20"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"°C"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\",\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"low"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"14"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"°C"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\",\n"},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"condition"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"Sunny"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\"\n"},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" },\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" {\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"day"},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"Tuesday"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\",\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"high"},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"19"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"°C"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\",\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"low"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"15"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"°C"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\",\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"condition"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"Mostly"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" Cloud"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"y"},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\"\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" },\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" {\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"day"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"Wednesday"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\",\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"high"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"18"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"°C"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\",\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"low"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"14"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"°C"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\",\n"},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"condition"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" \""},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"Cloud"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"y"},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"\"\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" }\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" ]\n"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" }\n"},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]} + +data: {"id":"chatcmpl-ABfwCjPMi0ubw56UyMIIeNfJzyogq","object":"chat.completion.chunk","created":1727346180,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[],"usage":{"prompt_tokens":19,"completion_tokens":177,"total_tokens":196,"completion_tokens_details":{"reasoning_tokens":0}}} + +data: [DONE] + diff --git a/test/openai/resources/chat/completions/snapshots/e2aad469b71d.txt b/test/openai/resources/chat/completions/snapshots/e2aad469b71d.txt new file mode 100644 index 00000000..b68ca8a3 --- /dev/null +++ b/test/openai/resources/chat/completions/snapshots/e2aad469b71d.txt @@ -0,0 +1,68 @@ +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"I'm"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" unable"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" to"},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" provide"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" real"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"-time"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" weather"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" updates"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" To"},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" get"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" the"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" current"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" weather"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" in"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" San"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" Francisco"},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" I"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" recommend"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" checking"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" a"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" reliable"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" weather"},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" website"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" or"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" a"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" weather"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":" app"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]} + +data: 
{"id":"chatcmpl-ABfw031mOJeYCSHe4yI2ZjOA6kMJL","object":"chat.completion.chunk","created":1727346168,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[],"usage":{"prompt_tokens":14,"completion_tokens":30,"total_tokens":44,"completion_tokens_details":{"reasoning_tokens":0}}} + +data: [DONE] + diff --git a/test/openai/resources/chat/completions/snapshots/f82268f2fefd.txt b/test/openai/resources/chat/completions/snapshots/f82268f2fefd.txt new file mode 100644 index 00000000..3b111d5e --- /dev/null +++ b/test/openai/resources/chat/completions/snapshots/f82268f2fefd.txt @@ -0,0 +1,52 @@ +data: {"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"role":"assistant","content":null},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"id":"call_JMW1whyEaYG438VE1OIflxA2","type":"function","function":{"name":"GetWeatherArgs","arguments":""}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\"ci"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"ty\": "}}]},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\"Edinb"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"urgh"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\", \"c"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"ountry"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\": \""}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"GB\", "}}]},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\"units"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\": \""}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"c\"}"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"id":"call_DNYTawLBoN8fj3KN6qU9N1Ou","type":"function","function":{"name":"get_stock_price","arguments":""}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"{\"ti"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"cker\""}}]},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":": \"AAP"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"L\", "}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"\"exch"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"ange\":"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":" \"NA"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"SDAQ\""}}]},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"}"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}]} + +data: {"id":"chatcmpl-ABfwAwrNePHUgBBezonVC6MX3zd63","object":"chat.completion.chunk","created":1727346178,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_5050236cbd","choices":[],"usage":{"prompt_tokens":149,"completion_tokens":60,"total_tokens":209,"completion_tokens_details":{"reasoning_tokens":0}}} + +data: [DONE] + diff --git a/test/openai/resources/chat/completions/streaming_snapshot_test.rb b/test/openai/resources/chat/completions/streaming_snapshot_test.rb new file mode 100644 index 00000000..786381be --- /dev/null +++ b/test/openai/resources/chat/completions/streaming_snapshot_test.rb @@ -0,0 +1,924 @@ +# frozen_string_literal: true + +require_relative "../../../test_helper" + +class OpenAI::Test::Resources::Chat::Completions::StreamingSnapshotTest < Minitest::Test + extend Minitest::Serial + include WebMock::API + + def before_all + super + WebMock.enable! + end + + def after_all + WebMock.disable! + super + end + + def setup + super + @client = OpenAI::Client.new(base_url: "http://localhost", api_key: "test-key") + end + + def teardown + WebMock.reset! 
+ super + end + + def load_snapshot(filename) + snapshot_path = File.join(__dir__, "snapshots", "#{filename}.txt") + File.read(snapshot_path) + end + + def stub_streaming_response(request_matcher, response_body) + stub_request(:post, "http://localhost/chat/completions") + .with(request_matcher) + .to_return( + status: 200, + headers: {"Content-Type" => "text/event-stream"}, + body: response_body + ) + end + + class StreamListener + attr_reader :stream, :events + + def initialize(stream) + @stream = stream + @events = [] + end + + def collect + @stream.each do |event| + @events << event + end + self + end + + def get_event_by_type(event_type) + @events.find { |e| e.type == event_type } + end + end + + # Test BaseModel classes + class LocationWeather < OpenAI::BaseModel + required :city, String + required :temperature, Float + required :units, String # "c" or "f" + end + + class LocationWeatherMultiple < OpenAI::BaseModel + required :city, String + required :temperature, Float + required :units, String + end + + class LocationWeatherMaxTokens < OpenAI::BaseModel + required :city, String + required :temperature, Float + required :units, String + end + + class LocationWeatherRefusal < OpenAI::BaseModel + required :city, String + required :temperature, Float + required :units, String + end + + class LocationWeatherLogprobs < OpenAI::BaseModel + required :city, String + required :temperature, Float + required :units, String + end + + class GetWeatherArgs < OpenAI::BaseModel + required :city, String + required :country, String + required :units, String, nil?: true, default: "c" + end + + class GetWeatherArgsMultiple < OpenAI::BaseModel + required :city, String + required :country, String + required :units, String, nil?: true, default: "c" + end + + class GetStockPrice < OpenAI::BaseModel + required :ticker, String + required :exchange, String + end + + def make_stream_snapshot_request(params, snapshot_content, &_block) + # Match minimally to avoid depending on client-side
coercions + expected_body = { + model: params[:model], + messages: params[:messages], + stream: true + } + + stub_streaming_response({body: hash_including(expected_body)}, snapshot_content) + + stream = @client.chat.completions.stream(**params) + listener = StreamListener.new(stream) + + if block_given? + stream.each do |event| + listener.events << event + yield(stream, event) + end + else + listener.collect + end + + listener + end + + def make_raw_stream_snapshot_request(params, snapshot_content) + # Match minimally to avoid depending on client-side coercions + expected_body = { + model: params[:model], + messages: params[:messages], + stream: true + } + + stub_streaming_response({body: hash_including(expected_body)}, snapshot_content) + + state = OpenAI::Helpers::Streaming::ChatCompletionStreamState.new + stream = @client.chat.completions.stream(**params) + + stream.each do |chunk| + state.handle_chunk(chunk) + end + + state + end + + def test_parse_nothing + listener = make_stream_snapshot_request( + { + model: "gpt-4o-2024-08-06", + messages: [ + { + role: "user", + content: "What's the weather like in SF?" 
+ } + ] + }, + load_snapshot("e2aad469b71d") + ) + + completion = listener.stream.get_final_completion + choice = completion.choices.first + + assert_equal(:stop, choice.finish_reason) + assert_equal(0, choice.index) + assert_nil(choice.logprobs) + assert_match(/unable to provide real-time weather/i, choice.message.content) + assert_nil(choice.message.parsed) + assert_nil(choice.message.refusal) + assert_equal(:assistant, choice.message.role) + + content_done = listener.events.find { |e| e.type == :"content.done" } + assert_pattern do + content_done => OpenAI::Helpers::Streaming::ChatContentDoneEvent[ + type: :"content.done", + content: /unable to provide real-time weather/i, + parsed: nil + ] + end + end + + def test_parse_basemodel + done_snapshots = [] + + listener = make_stream_snapshot_request( + { + model: "gpt-4o-2024-08-06", + messages: [ + { + role: "user", + content: "What's the weather like in SF?" + } + ], + response_format: LocationWeather + }, + load_snapshot("7e5ea4d12e7c") + ) do |stream, event| + if event.type == :"content.done" + done_snapshots << stream.current_completion_snapshot.dup + end + end + + assert_equal(1, done_snapshots.length) + assert_kind_of(LocationWeather, done_snapshots.first.choices.first.message.parsed) + + # Check for content.delta events (without parsed data since we don't support partial JSON) + content_delta_events = listener.events.select { |e| e.type == :"content.delta" } + + # Content delta events should exist but won't have parsed data + assert(content_delta_events.any?, "Should have content.delta events") + + # Delta events should have nil parsed data since we removed partial JSON support + content_delta_events.each do |event| + assert_nil(event.parsed, "Delta events should not have parsed data without partial JSON support") + end + + completion = listener.stream.get_final_completion + assert_pattern do + completion => { + choices: [ + { + finish_reason: :stop, + index: 0, + logprobs: nil, + message: { + annotations: 
nil, + audio: nil, + content: /{.*"city".*"temperature".*"units".*}/, + function_call: nil, + parsed: LocationWeather[ + city: "San Francisco", + temperature: 61.0, + units: "f" + ], + refusal: nil, + role: :assistant, + tool_calls: nil + } + } + ], + created: Integer, + id: String, + model: "gpt-4o-2024-08-06", + object: :"chat.completion", + service_tier: nil, + system_fingerprint: String, + usage: { + completion_tokens: Integer, + completion_tokens_details: Object, + prompt_tokens: Integer, + prompt_tokens_details: nil, + total_tokens: Integer + } + } + end + + content_done = listener.events.find { |e| e.type == :"content.done" } + assert_pattern do + content_done => OpenAI::Helpers::Streaming::ChatContentDoneEvent[ + type: :"content.done", + content: /{.*"city".*"temperature".*"units".*}/, + parsed: LocationWeather[ + city: "San Francisco", + temperature: 61.0, + units: "f" + ] + ] + end + end + + def test_parse_basemodel_multiple_choices + listener = make_stream_snapshot_request( + { + model: "gpt-4o-2024-08-06", + messages: [ + { + role: "user", + content: "What's the weather like in SF?" + } + ], + n: 3, + response_format: LocationWeather + }, + load_snapshot("a491adda08c3") + ) + + # Check event sequence + event_types = listener.events.map(&:type) + + # Count event types + chunk_count = event_types.count(:chunk) + content_delta_count = event_types.count(:"content.delta") + content_done_count = event_types.count(:"content.done") + + assert(chunk_count.positive?) + assert(content_delta_count.positive?) 
+ assert_equal(3, content_done_count) + + completion = listener.stream.get_final_completion + assert_equal(3, completion.choices.length) + + # Expected temperatures for each choice index + expected_temperatures = [65.0, 61.0, 59.0] + + completion.choices.each_with_index do |choice, idx| + expected_temp = expected_temperatures[idx] + assert_pattern do + choice => { + finish_reason: :stop, + index: ^idx, + logprobs: nil, + message: { + annotations: nil, + audio: nil, + content: /{.*"city".*"temperature".*"units".*}/, + function_call: nil, + parsed: LocationWeather[ + city: "San Francisco", + temperature: ^expected_temp, + units: "f" + ], + refusal: nil, + role: :assistant, + tool_calls: nil + } + } + end + end + end + + def test_parse_max_tokens_reached + error = assert_raises(OpenAI::LengthFinishReasonError) do + make_stream_snapshot_request( + { + model: "gpt-4o-2024-08-06", + messages: [ + { + role: "user", + content: "What's the weather like in SF?" + } + ], + max_tokens: 1, + response_format: LocationWeather + }, + load_snapshot("4cc50a6135d2") + ) + end + + assert_match(/length|max_tokens/, error.message.to_s.downcase) + end + + def test_parse_basemodel_refusal + listener = make_stream_snapshot_request( + { + model: "gpt-4o-2024-08-06", + messages: [ + { + role: "user", + content: "How do I make " + } + ], + response_format: LocationWeather + }, + load_snapshot("173417d55340") + ) + + refusal_done = listener.events.find { |e| e.type == :"refusal.done" } + assert_pattern do + refusal_done => OpenAI::Helpers::Streaming::ChatRefusalDoneEvent[ + type: :"refusal.done", + refusal: /sorry.*can't assist/i + ] + end + + completion = listener.stream.get_final_completion + choice = completion.choices.first + assert_pattern do + choice => { + finish_reason: :stop, + index: 0, + logprobs: nil, + message: { + annotations: nil, + audio: nil, + content: nil, + function_call: nil, + parsed: nil, + refusal: /sorry.*can't assist/i, + role: :assistant, + tool_calls: nil + } + } + 
end + end + + def test_content_logprobs_events + listener = make_stream_snapshot_request( + { + model: "gpt-4o-2024-08-06", + messages: [ + { + role: "user", + content: "Say foo" + } + ], + logprobs: true + }, + load_snapshot("83b060bae42e") + ) + + logprobs_content_delta = listener.events.find { |e| e.type == :"logprobs.content.delta" } + assert(logprobs_content_delta) + assert_pattern do + logprobs_content_delta => OpenAI::Helpers::Streaming::ChatLogprobsContentDeltaEvent[ + type: :"logprobs.content.delta", + content: Array, + snapshot: Array + ] + end + + logprobs_content_done = listener.events.find { |e| e.type == :"logprobs.content.done" } + assert(logprobs_content_done) + assert_pattern do + logprobs_content_done => OpenAI::Helpers::Streaming::ChatLogprobsContentDoneEvent[ + type: :"logprobs.content.done", + content: Array + ] + end + + assert(logprobs_content_done.content.all? { |lp| lp.respond_to?(:token) && lp.respond_to?(:logprob) }) + + completion = listener.stream.get_final_completion + choice = completion.choices.first + assert_pattern do + choice => { + finish_reason: :stop, + index: 0, + logprobs: { + content: Array, + refusal: nil + }, + message: { + annotations: nil, + audio: nil, + content: /Foo/, + function_call: nil, + parsed: nil, + refusal: nil, + role: :assistant, + tool_calls: nil + } + } + end + end + + def test_refusal_logprobs_events + listener = make_stream_snapshot_request( + { + model: "gpt-4o-2024-08-06", + messages: [ + { + role: "user", + content: "How do I make " + } + ], + logprobs: true, + response_format: LocationWeather + }, + load_snapshot("569c877e6942") + ) + + logprobs_refusal_delta = listener.events.find { |e| e.type == :"logprobs.refusal.delta" } + assert(logprobs_refusal_delta) + assert_pattern do + logprobs_refusal_delta => OpenAI::Helpers::Streaming::ChatLogprobsRefusalDeltaEvent[ + type: :"logprobs.refusal.delta", + refusal: Array, + snapshot: Array + ] + end + + logprobs_refusal_done = listener.events.find { |e| 
e.type == :"logprobs.refusal.done" } + assert(logprobs_refusal_done) + assert_pattern do + logprobs_refusal_done => OpenAI::Helpers::Streaming::ChatLogprobsRefusalDoneEvent[ + type: :"logprobs.refusal.done", + refusal: Array + ] + end + + completion = listener.stream.get_final_completion + choice = completion.choices.first + assert_pattern do + choice => { + finish_reason: :stop, + index: 0, + logprobs: { + content: nil, + refusal: Array + }, + message: { + annotations: nil, + audio: nil, + content: nil, + function_call: nil, + parsed: nil, + refusal: /sorry.*can't assist/i, + role: :assistant, + tool_calls: nil + } + } + end + end + + def test_parse_basemodel_tool + listener = make_stream_snapshot_request( + { + model: "gpt-4o-2024-08-06", + messages: [ + { + role: "user", + content: "What's the weather like in Edinburgh?" + } + ], + tools: [ + { + type: :function, + function: { + name: "GetWeatherArgs", + description: "Function that accepts GetWeatherArgs parameters", + parameters: GetWeatherArgs, + strict: true + } + } + ] + }, + load_snapshot("c6aa7e397b71") + ) + + # Test the function arguments done event + args_done = listener.events.find { |e| e.type == :"tool_calls.function.arguments.done" } + assert_pattern do + args_done => OpenAI::Helpers::Streaming::ChatFunctionToolCallArgumentsDoneEvent[ + type: :"tool_calls.function.arguments.done", + name: String, + index: 0, + arguments: /{.*"city".*"Edinburgh".*"country".*}/, + parsed: GetWeatherArgs[ + city: "Edinburgh", + country: "UK", + units: "c" + ] + ] + end + + completion = listener.stream.get_final_completion + choice = completion.choices.first + + assert_pattern do + choice => { + finish_reason: :tool_calls, + index: 0, + logprobs: nil, + message: { + annotations: nil, + audio: nil, + content: nil, + function_call: nil, + parsed: nil, + refusal: nil, + role: :assistant, + tool_calls: [ + { + function: { + arguments: /{.*"city".*"Edinburgh".*"country".*}/, + name: String, + parsed: GetWeatherArgs[ + city: 
"Edinburgh", + country: "UK", + units: "c" + ] + }, + id: String, + type: :function + } + ] + } + } + end + end + + def test_parse_multiple_basemodel_tools + listener = make_stream_snapshot_request( + { + model: "gpt-4o-2024-08-06", + messages: [ + { + role: "user", + content: "What's the weather like in Edinburgh?" + }, + { + role: "user", + content: "What's the price of AAPL?" + } + ], + tools: [ + { + type: :function, + function: { + name: "GetWeatherArgs", + description: "Function that accepts GetWeatherArgs parameters", + parameters: GetWeatherArgsMultiple, + strict: true + } + }, + { + type: :function, + function: { + name: "get_stock_price", + description: "Fetch the latest price for a given ticker", + parameters: GetStockPrice, + strict: true + } + } + ] + }, + load_snapshot("f82268f2fefd") + ) + + # Test the function arguments done events + args_done_events = listener.events.select { |e| e.type == :"tool_calls.function.arguments.done" } + assert_equal(2, args_done_events.length) + + # First tool call event (weather) + weather_event = args_done_events[0] + assert_pattern do + weather_event => OpenAI::Helpers::Streaming::ChatFunctionToolCallArgumentsDoneEvent[ + type: :"tool_calls.function.arguments.done", + name: /GetWeatherArgs|get_weather_args/i, + index: 0, + arguments: /{.*"city".*"Edinburgh".*"country".*}/, + parsed: GetWeatherArgsMultiple[ + city: "Edinburgh", + country: "GB" + ] + ] + end + + # Second tool call event (stock) + stock_event = args_done_events[1] + assert_pattern do + stock_event => OpenAI::Helpers::Streaming::ChatFunctionToolCallArgumentsDoneEvent[ + type: :"tool_calls.function.arguments.done", + name: "get_stock_price", + index: 1, + arguments: /{.*"ticker".*"AAPL".*"exchange".*}/, + parsed: GetStockPrice[ + ticker: "AAPL", + exchange: "NASDAQ" + ] + ] + end + + completion = listener.stream.get_final_completion + choice = completion.choices.first + + assert_equal(2, choice.message.tool_calls.length) + + # Check first tool call 
(weather) + weather_call = choice.message.tool_calls[0] + assert_pattern do + weather_call => { + function: { + arguments: /{.*"city".*"Edinburgh".*"country".*}/, + name: /GetWeatherArgs|get_weather_args/i, + parsed: GetWeatherArgsMultiple[ + city: "Edinburgh", + country: "GB" + ] + }, + id: String, + type: :function + } + end + + # Check second tool call (stock) + stock_call = choice.message.tool_calls[1] + assert_pattern do + stock_call => { + function: { + arguments: /{.*"ticker".*"AAPL".*"exchange".*}/, + name: "get_stock_price", + parsed: GetStockPrice[ + ticker: "AAPL", + exchange: "NASDAQ" + ] + }, + id: String, + type: :function + } + end + end + + def test_parse_strict_tools + listener = make_stream_snapshot_request( + { + model: "gpt-4o-2024-08-06", + messages: [ + { + role: "user", + content: "What's the weather like in SF?" + } + ], + tools: [ + { + type: :function, + function: { + name: "get_weather", + parameters: { + type: "object", + properties: { + city: {type: "string"}, + state: {type: "string"} + }, + required: %w[city state], + additionalProperties: false + }, + strict: true + } + } + ] + }, + load_snapshot("a247c49c5fcd") + ) + + completion = listener.stream.get_final_completion + choice = completion.choices.first + + assert_pattern do + choice => { + finish_reason: :tool_calls, + index: 0, + logprobs: nil, + message: { + annotations: nil, + audio: nil, + content: nil, + function_call: nil, + parsed: nil, + refusal: nil, + role: :assistant, + tool_calls: [ + { + function: { + arguments: /{.*"city".*"San Francisco".*"state".*"CA".*}/, + name: "get_weather", + parsed: { + city: "San Francisco", + state: "CA" + } + }, + id: String, + type: :function + } + ] + } + } + end + end + + def test_non_basemodel_response_format + listener = make_stream_snapshot_request( + { + model: "gpt-4o-2024-08-06", + messages: [ + { + role: "user", + content: "What's the weather like in SF? 
Give me any JSON back" + } + ], + response_format: {type: "json_object"} + }, + load_snapshot("d61558011839") + ) + + completion = listener.stream.get_final_completion + choice = completion.choices.first + + assert_pattern do + choice => { + finish_reason: :stop, + index: 0, + logprobs: nil, + message: { + annotations: nil, + audio: nil, + content: /{.*"location".*San Francisco.*"weather".*}/m, + function_call: nil, + parsed: nil, + refusal: nil, + role: :assistant, + tool_calls: nil + } + } + end + end + + def test_allows_non_strict_tools_but_no_parsing + listener = make_stream_snapshot_request( + { + model: "gpt-4o-2024-08-06", + messages: [ + { + role: "user", + content: "what's the weather in NYC?" + } + ], + tools: [ + { + type: :function, + function: { + name: "get_weather", + parameters: { + type: "object", + properties: { + city: {type: "string"} + } + } + } + } + ] + }, + load_snapshot("2018feb66ae1") + ) + + args_done = listener.events.find { |e| e.type == :"tool_calls.function.arguments.done" } + assert_pattern do + args_done => OpenAI::Helpers::Streaming::ChatFunctionToolCallArgumentsDoneEvent[ + type: :"tool_calls.function.arguments.done", + name: "get_weather", + index: 0, + arguments: /{.*"city".*"New York City".*}/, + parsed: nil + ] + end + + completion = listener.stream.get_final_completion + choice = completion.choices.first + + assert_pattern do + choice => { + finish_reason: :tool_calls, + index: 0, + logprobs: nil, + message: { + annotations: nil, + audio: nil, + content: nil, + function_call: nil, + parsed: nil, + refusal: nil, + role: :assistant, + tool_calls: [ + { + function: { + arguments: /{.*"city".*"New York City".*}/, + name: "get_weather", + parsed: nil + }, + id: String, + type: :function + } + ] + } + } + end + end + + def test_chat_completion_state_helper + state = make_raw_stream_snapshot_request( + { + model: "gpt-4o-2024-08-06", + messages: [ + { + role: "user", + content: "What's the weather like in SF?" 
+ } + ] + }, + load_snapshot("e2aad469b71d") + ) + + completion = state.get_final_completion + choice = completion.choices.first + + assert_pattern do + choice => { + finish_reason: :stop, + index: 0, + logprobs: nil, + message: { + annotations: nil, + audio: nil, + content: /unable to provide real-time weather/i, + function_call: nil, + parsed: nil, + refusal: nil, + role: :assistant, + tool_calls: nil + } + } + end + end +end diff --git a/test/openai/resources/chat/completions/streaming_test.rb b/test/openai/resources/chat/completions/streaming_test.rb new file mode 100644 index 00000000..7355b6c8 --- /dev/null +++ b/test/openai/resources/chat/completions/streaming_test.rb @@ -0,0 +1,1383 @@ +# frozen_string_literal: true + +require_relative "../../../test_helper" + +class OpenAI::Test::Resources::Chat::Completions::StreamingTest < Minitest::Test + extend Minitest::Serial + include WebMock::API + + def before_all + super + WebMock.enable! + end + + def after_all + WebMock.disable! + super + end + + def setup + super + @client = OpenAI::Client.new(base_url: "http://localhost", api_key: "test-key") + end + + def teardown + WebMock.reset! 
+ super + end + + def stub_streaming_response(response_body, request_options = {}) + default_request = { + messages: [{content: "Hello", role: "user"}], + model: "gpt-4o-mini", + stream: true + } + + stub_request(:post, "http://localhost/chat/completions") + .with( + body: hash_including(default_request.merge(request_options)) + ) + .to_return( + status: 200, + headers: {"Content-Type" => "text/event-stream"}, + body: response_body + ) + end + + def basic_params + { + messages: [{content: "Hello", role: :user}], + model: "gpt-4o-mini" + } + end + + def test_basic_text_streaming + stub_streaming_response(basic_text_sse_response) + + stream = @client.chat.completions.stream(**basic_params) + events = stream.to_a + + assert_content_delta_events( + events, + expected_deltas: ["Hello", " there!", " How", " can", " I", " help?"], + expected_snapshot: "Hello there! How can I help?" + ) + + content_done = events.find { |e| e.type == :"content.done" } + assert_pattern do + content_done => OpenAI::Helpers::Streaming::ChatContentDoneEvent[ + type: :"content.done", + content: "Hello there! How can I help?" 
+ ] + end + + chunk_events = events.select { |e| e.type == :chunk } + assert_equal(7, chunk_events.length) + + first_chunk = chunk_events.first + assert_pattern do + first_chunk => OpenAI::Helpers::Streaming::ChatChunkEvent[ + type: :chunk, + chunk: { + id: "chatcmpl-123", + model: "gpt-4o-mini" + } + ] + end + end + + def test_text_method + stub_streaming_response(basic_text_sse_response) + + stream = @client.chat.completions.stream(**basic_params) + text_chunks = stream.text.map do |chunk| + chunk + end + + assert_equal(["Hello", " there!", " How", " can", " I", " help?"], text_chunks) + end + + def test_get_final_completion + stub_streaming_response(completion_with_usage_sse_response) + + stream = @client.chat.completions.stream(**basic_params) + completion = stream.get_final_completion + + assert_equal("chatcmpl-123", completion.id) + assert_equal("gpt-4o-mini", completion.model) + assert_equal("Test response", completion.choices.first.message.content) + assert_equal(:stop, completion.choices.first.finish_reason) + assert_equal(12, completion.usage.total_tokens) if completion.usage + end + + def test_get_output_text + stub_streaming_response(basic_text_sse_response) + + stream = @client.chat.completions.stream(**basic_params) + output_text = stream.get_output_text + + assert_equal("Hello there! 
How can I help?", output_text) + end + + def test_get_output_text_with_multiple_choices + sse_response = <<~SSE + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":"First choice"},"finish_reason":null}]} + + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":1,"delta":{"role":"assistant","content":"Second choice"},"finish_reason":null}]} + + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]} + + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":1,"delta":{},"finish_reason":"stop"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response) + stream = @client.chat.completions.stream(**basic_params) + output_text = stream.get_output_text + + assert_equal("First choiceSecond choice", output_text) + end + + def test_get_output_text_with_tool_calls_only + stub_streaming_response(tool_calls_only_sse_response) + + stream = @client.chat.completions.stream(**basic_params) + output_text = stream.get_output_text + + assert_equal("", output_text) + end + + def test_streaming_with_refusal + stub_streaming_response(refusal_sse_response) + + stream = @client.chat.completions.stream(**basic_params) + events = stream.to_a + + assert_refusal_delta_events( + events, + expected_deltas: ["I cannot", " help with that"], + expected_snapshot: "I cannot help with that" + ) + + refusal_done = events.find { |e| e.type == :"refusal.done" } + assert_pattern do + refusal_done => OpenAI::Helpers::Streaming::ChatRefusalDoneEvent[ + type: :"refusal.done", + refusal: "I cannot help with that" + ] + end + end + + def test_streaming_with_tool_calls + stub_streaming_response(tool_calls_sse_response) + + stream = @client.chat.completions.stream(**basic_params, tools: 
weather_tool) + events = stream.to_a + + assert_tool_call_delta_events( + events, + expected_name: "get_weather", + expected_index: 0, + expected_arguments: '{"location":"Paris","units":"celsius"}' + ) + end + + class WeatherToolModel < OpenAI::BaseModel + required :location, String + required :units, String + end + + def test_streaming_tool_call_parsed_field + sse_response = <<~SSE + data: {"id":"chatcmpl-test","object":"chat.completion.chunk","created":1,"model":"gpt-4o","choices":[{"index":0,"delta":{"role":"assistant","tool_calls":[{"index":0,"id":"call_abc","type":"function","function":{"name":"get_weather","arguments":""}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-test","object":"chat.completion.chunk","created":1,"model":"gpt-4o","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\\"location\\":\\"New York\\",\\"units\\":\\"fahrenheit\\"}"}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-test","object":"chat.completion.chunk","created":1,"model":"gpt-4o","choices":[{"index":0,"delta":{},"finish_reason":"tool_calls"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response) + + stream = @client.chat.completions.stream( + **basic_params, + tools: [ + { + type: :function, + function: { + name: "get_weather", + parameters: WeatherToolModel, + strict: true + } + } + ] + ) + + completion = stream.get_final_completion + + assert_pattern do + completion.choices.first.message.tool_calls.first.function => { + arguments: '{"location":"New York","units":"fahrenheit"}', + name: "get_weather", + parsed: WeatherToolModel[ + location: "New York", + units: "fahrenheit" + ] + } + end + end + + class ContentFilterTestModel < OpenAI::BaseModel + required :x, Integer + end + + def test_content_filter_with_parseable_response_format_raises + sse_response = <<~SSE + data: 
{"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":"Partial"},"finish_reason":null}]} + + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"content_filter"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response) + + assert_raises(OpenAI::Helpers::Streaming::ContentFilterFinishReasonError) do + @client.chat.completions.stream(**basic_params, response_format: ContentFilterTestModel).each { |_e| } # rubocop:disable Lint/EmptyBlock + end + end + + def test_content_filter_without_parseable_input_does_not_raise + sse_response = <<~SSE + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":"Partial"},"finish_reason":null}]} + + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"content_filter"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response) + + stream = @client.chat.completions.stream(**basic_params) + + # Iterating all events should not raise: + events = stream.map { |e| e } + assert(events.any? 
{ |e| e.type == :chunk }) + + completion = stream.get_final_completion + assert_equal(:content_filter, completion.choices.first.finish_reason) + end + + def test_length_finish_reason_with_strict_tool_raises + sse_response = <<~SSE + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","tool_calls":[{"index":0,"id":"call_123","type":"function","function":{"name":"get_weather","arguments":""}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\\"city\\":\\"Paris\\"}"}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"length"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response) + + tools = [ + { + type: :function, + function: { + name: "get_weather", + parameters: { + type: "object", + properties: {city: {type: "string"}}, + required: ["city"], + additionalProperties: false + }, + strict: true + } + } + ] + + assert_raises(OpenAI::LengthFinishReasonError) do + @client.chat.completions.stream(**basic_params, tools: tools).each { |_e| } # rubocop:disable Lint/EmptyBlock + end + end + + def test_azure_openai_compatibility + stub_streaming_response(azure_compatibility_sse_response) + + stream = @client.chat.completions.stream(**basic_params) + events = stream.to_a + + content_deltas = events.select { |e| e.type == :"content.delta" } + assert_equal(1, content_deltas.length) + assert_equal("Hello", content_deltas.first.delta) + end + + def test_interleaved_multiple_tool_calls_indices + sse_response = <<~SSE + data: 
{"id":"chatcmpl-1","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","tool_calls":[{"index":0,"id":"call_a","type":"function","function":{"name":"get_weather","arguments":""}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-1","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\\"city\\":\\"Paris\\"}"}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-1","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"id":"call_b","type":"function","function":{"name":"get_stock","arguments":""}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-1","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"{\\"ticker\\":\\"AAPL\\"}"}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-1","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":",\\"units\\":\\"c\\"}"}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-1","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":",\\"exchange\\":\\"NASDAQ\\"}"}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-1","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"tool_calls"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response) + + tools = [ + { + type: :function, + function: { + name: "get_weather", + parameters: { + type: "object", + properties: {city: {type: "string"}}, + required: ["city"], + additionalProperties: false + }, + strict: true + } + }, + { + type: :function, + function: { + name: "get_stock", + parameters: { + type: 
"object", + properties: {ticker: {type: "string"}}, + required: ["ticker"], + additionalProperties: false + }, + strict: true + } + } + ] + + stream = @client.chat.completions.stream(**basic_params, tools: tools) + events = stream.to_a + + deltas = events.select { |e| e.type == :"tool_calls.function.arguments.delta" } + assert(deltas.any? { |e| e.index.zero? && e.name == "get_weather" }) + assert(deltas.any? { |e| e.index == 1 && e.name == "get_stock" }) + + weather_delta = deltas.find { |e| e.index.zero? && e.name == "get_weather" } + assert_pattern do + weather_delta => OpenAI::Helpers::Streaming::ChatFunctionToolCallArgumentsDeltaEvent[ + type: :"tool_calls.function.arguments.delta", + name: "get_weather", + index: 0 + ] + end + + dones = events.select { |e| e.type == :"tool_calls.function.arguments.done" } + + weather_done = dones.find { |e| e.index.zero? } + assert_pattern do + weather_done => OpenAI::Helpers::Streaming::ChatFunctionToolCallArgumentsDoneEvent[ + type: :"tool_calls.function.arguments.done", + name: "get_weather", + index: 0, + arguments: String + ] + end + assert(weather_done.arguments.include?("city")) + + stock_done = dones.find { |e| e.index == 1 } + assert_pattern do + stock_done => OpenAI::Helpers::Streaming::ChatFunctionToolCallArgumentsDoneEvent[ + type: :"tool_calls.function.arguments.done", + name: "get_stock", + index: 1, + arguments: String + ] + end + assert(stock_done.arguments.include?("ticker")) + + completion = stream.get_final_completion + tool_calls = completion.choices.first.message.tool_calls + assert_equal(2, tool_calls.length) + end + + def test_text_method_with_tool_calls_only + stub_streaming_response(tool_calls_only_sse_response) + + stream = @client.chat.completions.stream(**basic_params) + text_chunks = stream.text.map { |chunk| chunk } + + assert_equal([], text_chunks) + end + + def test_multiple_choices_one_toolcall_one_text + sse_response = <<~SSE + data: 
{"id":"chatcmpl-4","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":"Hello"},"finish_reason":null}]} + + data: {"id":"chatcmpl-4","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":1,"delta":{"role":"assistant","tool_calls":[{"index":0,"id":"call_x","type":"function","function":{"name":"op","arguments":""}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-4","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":1,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\\"arg\\":1}"}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-4","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]} + + data: {"id":"chatcmpl-4","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":1,"delta":{},"finish_reason":"tool_calls"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response) + stream = @client.chat.completions.stream(**basic_params) + events = stream.to_a + + content_done = events.find { |e| e.type == :"content.done" } + assert_pattern do + content_done => OpenAI::Helpers::Streaming::ChatContentDoneEvent[ + type: :"content.done", + content: "Hello" + ] + end + + tool_done = events.find { |e| e.type == :"tool_calls.function.arguments.done" } + assert_pattern do + tool_done => OpenAI::Helpers::Streaming::ChatFunctionToolCallArgumentsDoneEvent[ + type: :"tool_calls.function.arguments.done", + name: "op", + index: 0, + arguments: '{"arg":1}' + ] + end + + completion = stream.get_final_completion + assert_equal(2, completion.choices.length) + assert_equal(:stop, completion.choices[0].finish_reason) + assert_equal(:tool_calls, completion.choices[1].finish_reason) + end + + class PersonModel < OpenAI::BaseModel + required :name, String + required :age, Integer + end + + class CityWeatherModel < 
OpenAI::BaseModel + required :city, String + required :temperature, Integer + required :units, String + end + + def test_structured_output_streaming + stub_structured_output_request + + stream = @client.chat.completions.stream( + messages: [{content: "Generate a person", role: :user}], + model: "gpt-4o-mini", + response_format: PersonModel + ) + + content_deltas = [] + content_done = nil + + stream.each do |event| + content_deltas << event if event.type == :"content.delta" + content_done = event if event.type == :"content.done" + end + + assert_equal(3, content_deltas.length) + assert_equal( + [ + '{"name":', + '{"name":"John",', + '{"name":"John","age":30}' + ], + content_deltas.map(&:snapshot) + ) + + assert_pattern do + content_done => OpenAI::Helpers::Streaming::ChatContentDoneEvent[ + type: :"content.done", + content: '{"name":"John","age":30}', + parsed: PersonModel[ + name: "John", + age: 30 + ] + ] + end + end + + def test_structured_output_parsed_in_final_completion + stub_structured_output_request + + stream = @client.chat.completions.stream( + messages: [{content: "Generate a person", role: :user}], + model: "gpt-4o-mini", + response_format: PersonModel + ) + completion = stream.get_final_completion + + return unless completion.choices.first.message.parsed + assert_pattern do + completion.choices.first.message => { + parsed: PersonModel[ + name: "John", + age: 30 + ] + } + end + end + + private + + def weather_tool + [ + { + type: :function, + function: { + name: "get_weather", + parameters: { + type: "object", + properties: { + city: {type: "string"}, + units: {type: "string"} + }, + required: ["city"], + additionalProperties: false + }, + strict: true + } + } + ] + end + + def assert_content_delta_events(events, expected_deltas:, expected_snapshot:) + content_deltas = events.select { |e| e.type == :"content.delta" } + + assert_equal(expected_deltas.length, content_deltas.length, "Incorrect number of content delta events") + 
assert_equal(expected_deltas, content_deltas.map(&:delta), "Incorrect delta values") + assert_equal(expected_snapshot, content_deltas.last.snapshot, "Incorrect final snapshot") + + first_delta = content_deltas.first + assert_instance_of(OpenAI::Helpers::Streaming::ChatContentDeltaEvent, first_delta) + assert_equal(:"content.delta", first_delta.type) + assert_equal(expected_deltas.first, first_delta.delta) + assert_equal(expected_deltas.first, first_delta.snapshot) + end + + def assert_refusal_delta_events(events, expected_deltas:, expected_snapshot:) + refusal_deltas = events.select { |e| e.type == :"refusal.delta" } + + assert_equal(expected_deltas.length, refusal_deltas.length, "Incorrect number of refusal delta events") + assert_equal(expected_deltas, refusal_deltas.map(&:delta), "Incorrect delta values") + assert_equal(expected_snapshot, refusal_deltas.last.snapshot, "Incorrect final snapshot") + + first_delta = refusal_deltas.first + assert_instance_of(OpenAI::Helpers::Streaming::ChatRefusalDeltaEvent, first_delta) + assert_equal(:"refusal.delta", first_delta.type) + assert_equal(expected_deltas.first, first_delta.delta) + assert_equal(expected_deltas.first, first_delta.snapshot) + end + + def assert_tool_call_delta_events(events, expected_name:, expected_index:, expected_arguments:) # rubocop:disable Lint/UnusedMethodArgument + tool_deltas = events.select { |e| e.type == :"tool_calls.function.arguments.delta" } + assert(tool_deltas.length.positive?, "No tool call delta events found") + + first_tool_delta = tool_deltas.first + assert_pattern do + first_tool_delta => OpenAI::Helpers::Streaming::ChatFunctionToolCallArgumentsDeltaEvent[ + type: :"tool_calls.function.arguments.delta", + name: expected_name, + index: expected_index + ] + end + + tool_done = events.find { |e| e.type == :"tool_calls.function.arguments.done" } + assert_pattern do + tool_done => OpenAI::Helpers::Streaming::ChatFunctionToolCallArgumentsDoneEvent[ + type: 
:"tool_calls.function.arguments.done", + name: expected_name, + index: expected_index, + arguments: expected_arguments + ] + end + end + + def stub_structured_output_request + stub_request(:post, "http://localhost/chat/completions") + .with( + body: hash_including( + "messages" => [{"content" => "Generate a person", "role" => "user"}], + "model" => "gpt-4o-mini", + "stream" => true, + "response_format" => { + "type" => "json_schema", + "json_schema" => { + "strict" => true, + "name" => "PersonModel", + "schema" => hash_including("type" => "object") + } + } + ) + ) + .to_return( + status: 200, + headers: {"Content-Type" => "text/event-stream"}, + body: structured_output_sse_response + ) + end + + def test_empty_stream_response + sse_response = <<~SSE + data: {"id":"chatcmpl-5","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null}]} + + data: {"id":"chatcmpl-5","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response) + stream = @client.chat.completions.stream(**basic_params) + events = stream.to_a + + content_done = events.find { |e| e.type == :"content.done" } + assert_pattern do + content_done => OpenAI::Helpers::Streaming::ChatContentDoneEvent[ + type: :"content.done", + content: "" + ] + end + + completion = stream.get_final_completion + assert_equal("", completion.choices.first.message.content) + end + + def test_malformed_sse_data_recovery + sse_response = <<~SSE + data: {"id":"chatcmpl-6","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":"Start"},"finish_reason":null}]} + + data: malformed json data here + + data: {"id":"chatcmpl-6","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"content":" 
continues"},"finish_reason":null}]} + + data: {"id":"chatcmpl-6","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response) + stream = @client.chat.completions.stream(**basic_params) + + events = [] + begin + stream.each { |e| events << e } + rescue JSON::ParserError + # Expected - malformed JSON should be skipped + end + + content_deltas = events.select { |e| e.type == :"content.delta" } + assert(content_deltas.any?) + end + + def test_stream_with_usage_in_stream_options + sse_response = <<~SSE + data: {"id":"chatcmpl-7","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":"Test"},"finish_reason":null}]} + + data: {"id":"chatcmpl-7","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}],"usage":null} + + data: {"id":"chatcmpl-7","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[],"usage":{"prompt_tokens":15,"completion_tokens":1,"total_tokens":16,"prompt_tokens_details":{"cached_tokens":0},"completion_tokens_details":{"reasoning_tokens":0}}} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response, stream_options: {include_usage: true}) + + stream = @client.chat.completions.stream( + **basic_params, + stream_options: {include_usage: true} + ) + completion = stream.get_final_completion + + assert_equal(15, completion.usage.prompt_tokens) + assert_equal(1, completion.usage.completion_tokens) + assert_equal(16, completion.usage.total_tokens) + end + + def test_tool_choice_auto + sse_response = <<~SSE + data: {"id":"chatcmpl-8","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":"I'll check the weather"},"finish_reason":null}]} + + data: 
{"id":"chatcmpl-8","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response, tools: weather_tool, tool_choice: "auto") + + stream = @client.chat.completions.stream( + **basic_params, + tools: weather_tool, + tool_choice: "auto" + ) + completion = stream.get_final_completion + + assert_equal("I'll check the weather", completion.choices.first.message.content) + assert_nil(completion.choices.first.message.tool_calls) + end + + def test_tool_choice_required + sse_response = <<~SSE + data: {"id":"chatcmpl-9","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","tool_calls":[{"index":0,"id":"call_req","type":"function","function":{"name":"get_weather","arguments":""}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-9","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\\"city\\":\\"NYC\\"}"}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-9","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"tool_calls"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response, tools: weather_tool, tool_choice: "required") + + stream = @client.chat.completions.stream( + **basic_params, + tools: weather_tool, + tool_choice: "required" + ) + completion = stream.get_final_completion + + assert_nil(completion.choices.first.message.content) + assert_equal(1, completion.choices.first.message.tool_calls.length) + assert_equal("get_weather", completion.choices.first.message.tool_calls.first.function.name) + end + + def test_tool_choice_specific_function + sse_response = <<~SSE + data: 
{"id":"chatcmpl-10","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","tool_calls":[{"index":0,"id":"call_spec","type":"function","function":{"name":"get_weather","arguments":""}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-10","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\\"city\\":\\"Boston\\"}"}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-10","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"tool_calls"}]} + + data: [DONE] + + SSE + + stub_streaming_response( + sse_response, + tools: weather_tool, + tool_choice: {type: :function, function: {name: "get_weather"}} + ) + + stream = @client.chat.completions.stream( + **basic_params, + tools: weather_tool, + tool_choice: {type: :function, function: {name: "get_weather"}} + ) + completion = stream.get_final_completion + + assert_equal("get_weather", completion.choices.first.message.tool_calls.first.function.name) + end + + def test_parallel_tool_calls + sse_response = <<~SSE + data: {"id":"chatcmpl-11","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","tool_calls":[{"index":0,"id":"call_a","type":"function","function":{"name":"get_weather","arguments":""}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-11","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\\"city\\":\\"NYC\\""}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-11","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":",\\"units\\":\\"c\\"}"}}]},"finish_reason":null}]} + + data: 
{"id":"chatcmpl-11","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"id":"call_b","type":"function","function":{"name":"get_weather","arguments":""}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-11","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"{\\"city\\":\\"LA\\""}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-11","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":",\\"units\\":\\"c\\"}"}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-11","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":2,"id":"call_c","type":"function","function":{"name":"get_weather","arguments":""}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-11","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":2,"function":{"arguments":"{\\"city\\":\\"SF\\""}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-11","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":2,"function":{"arguments":",\\"units\\":\\"c\\"}"}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-11","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"tool_calls"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response) + stream = @client.chat.completions.stream(**basic_params, tools: weather_tool) + events = stream.to_a + + done_events = events.select { |e| e.type == :"tool_calls.function.arguments.done" } + assert_equal(3, done_events.length) + + done_events.each_with_index do |event, idx| + refute_nil(event.arguments, "Arguments should not be nil for 
index #{idx}") + refute_equal("", event.arguments, "Arguments should not be empty for index #{idx}") + end + + cities = done_events.map { |e| JSON.parse(e.arguments)["city"] } + assert_equal(%w[NYC LA SF], cities) + + completion = stream.get_final_completion + assert_equal(3, completion.choices.first.message.tool_calls.length) + + completion.choices.first.message.tool_calls.each do |tool_call| + assert_equal("get_weather", tool_call.function.name) + end + end + + def test_streaming_with_stop_sequences + sse_response = <<~SSE + data: {"id":"chatcmpl-12","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":"Line 1"},"finish_reason":null}]} + + data: {"id":"chatcmpl-12","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response, stop: ["\\n", "END"]) + + stream = @client.chat.completions.stream( + **basic_params, + stop: ["\\n", "END"] + ) + completion = stream.get_final_completion + + assert_equal("Line 1", completion.choices.first.message.content) + assert_equal(:stop, completion.choices.first.finish_reason) + end + + def test_streaming_with_max_completion_tokens + sse_response = <<~SSE + data: {"id":"chatcmpl-13","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":"Truncated"},"finish_reason":null}]} + + data: {"id":"chatcmpl-13","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"length"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response, max_completion_tokens: 5) + + stream = @client.chat.completions.stream( + **basic_params, + max_completion_tokens: 5 + ) + completion = stream.get_final_completion + + assert_equal("Truncated", completion.choices.first.message.content) + assert_equal(:length, 
completion.choices.first.finish_reason) + end + + def test_streaming_with_system_message + sse_response = <<~SSE + data: {"id":"chatcmpl-14","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":"Professional response"},"finish_reason":null}]} + + data: {"id":"chatcmpl-14","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]} + + data: [DONE] + + SSE + + stub_streaming_response( + sse_response, + messages: [ + {content: "You are a helpful assistant", role: "system"}, + {content: "Hello", role: "user"} + ] + ) + + stream = @client.chat.completions.stream( + messages: [ + {content: "You are a helpful assistant", role: :system}, + {content: "Hello", role: :user} + ], + model: "gpt-4o-mini" + ) + completion = stream.get_final_completion + + assert_equal("Professional response", completion.choices.first.message.content) + end + + def test_streaming_with_seed_parameter + sse_response = <<~SSE + data: {"id":"chatcmpl-15","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","system_fingerprint":"fp_seed123","choices":[{"index":0,"delta":{"role":"assistant","content":"Deterministic"},"finish_reason":null}]} + + data: {"id":"chatcmpl-15","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response, seed: 42) + + stream = @client.chat.completions.stream( + **basic_params, + seed: 42 + ) + completion = stream.get_final_completion + + assert_equal("Deterministic", completion.choices.first.message.content) + end + + def test_streaming_with_temperature_and_top_p + sse_response = <<~SSE + data: {"id":"chatcmpl-16","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":"Creative output"},"finish_reason":null}]} + + data: 
{"id":"chatcmpl-16","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response, temperature: 0.8, top_p: 0.9) + + stream = @client.chat.completions.stream( + **basic_params, + temperature: 0.8, + top_p: 0.9 + ) + completion = stream.get_final_completion + + assert_equal("Creative output", completion.choices.first.message.content) + end + + def test_streaming_with_frequency_and_presence_penalty + sse_response = <<~SSE + data: {"id":"chatcmpl-17","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":"Varied vocabulary"},"finish_reason":null}]} + + data: {"id":"chatcmpl-17","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response, frequency_penalty: 0.5, presence_penalty: 0.3) + + stream = @client.chat.completions.stream( + **basic_params, + frequency_penalty: 0.5, + presence_penalty: 0.3 + ) + completion = stream.get_final_completion + + assert_equal("Varied vocabulary", completion.choices.first.message.content) + end + + def test_streaming_with_user_parameter + sse_response = <<~SSE + data: {"id":"chatcmpl-18","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":"User-specific"},"finish_reason":null}]} + + data: {"id":"chatcmpl-18","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response, user: "user-123") + + stream = @client.chat.completions.stream( + **basic_params, + user: "user-123" + ) + completion = stream.get_final_completion + + assert_equal("User-specific", completion.choices.first.message.content) + end + + def 
test_invalid_json_in_tool_arguments_recovery + sse_response = <<~SSE + data: {"id":"chatcmpl-19","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","tool_calls":[{"index":0,"id":"call_invalid","type":"function","function":{"name":"get_weather","arguments":""}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-19","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\\"city\\": \\"SF\\", invalid json here"}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-19","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"tool_calls"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response) + stream = @client.chat.completions.stream(**basic_params, tools: weather_tool) + + events = [] + begin + stream.each { |e| events << e } + rescue JSON::ParserError + # Expected - invalid JSON in tool arguments + end + + done_event = events.find { |e| e.type == :"tool_calls.function.arguments.done" } + assert(done_event) + assert(done_event.arguments.include?("city")) + end + + def test_streaming_with_response_format_text + sse_response = <<~SSE + data: {"id":"chatcmpl-20","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":"Plain text"},"finish_reason":null}]} + + data: {"id":"chatcmpl-20","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response, response_format: {type: "text"}) + + stream = @client.chat.completions.stream( + **basic_params, + response_format: {type: "text"} + ) + completion = stream.get_final_completion + + assert_equal("Plain text", completion.choices.first.message.content) + 
assert_nil(completion.choices.first.message.parsed) + end + + def test_streaming_with_logit_bias + sse_response = <<~SSE + data: {"id":"chatcmpl-21","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":"Biased output"},"finish_reason":null}]} + + data: {"id":"chatcmpl-21","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response, logit_bias: {"50256" => -100}) + + stream = @client.chat.completions.stream( + **basic_params, + logit_bias: {"50256" => -100} + ) + completion = stream.get_final_completion + + assert_equal("Biased output", completion.choices.first.message.content) + end + + def test_delta_accumulation_edge_cases + # Test 1: String accumulation across multiple chunks + sse_response_string_concat = <<~SSE + data: {"id":"chatcmpl-100","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":"First"},"finish_reason":null}]} + + data: {"id":"chatcmpl-100","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"content":" Second"},"finish_reason":null}]} + + data: {"id":"chatcmpl-100","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"content":" Third"},"finish_reason":null}]} + + data: {"id":"chatcmpl-100","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response_string_concat) + stream = @client.chat.completions.stream(**basic_params) + completion = stream.get_final_completion + assert_equal("First Second Third", completion.choices.first.message.content) + + # Test 2: Numeric accumulation in usage tokens + sse_response_numeric = <<~SSE + data: 
{"id":"chatcmpl-101","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":"Hi"},"finish_reason":null}],"usage":{"prompt_tokens":5,"completion_tokens":1,"total_tokens":6}} + + data: {"id":"chatcmpl-101","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}],"usage":{"prompt_tokens":0,"completion_tokens":1,"total_tokens":1}} + + data: {"id":"chatcmpl-101","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[],"usage":{"prompt_tokens":0,"completion_tokens":0,"total_tokens":0}} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response_numeric) + stream = @client.chat.completions.stream(**basic_params) + completion = stream.get_final_completion + # Usage accumulation should sum numeric values + assert_equal("Hi", completion.choices.first.message.content) + + # Test 3: Array accumulation with indexed tool calls (testing index property replacement) + sse_response_indexed_arrays = <<~SSE + data: {"id":"chatcmpl-102","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","tool_calls":[{"index":0,"id":"call_a","type":"function","function":{"name":"func1","arguments":""}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-102","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\\"a\\":"}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-102","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"id":"call_b","type":"function","function":{"name":"func2","arguments":""}}]},"finish_reason":null}]} + + data: 
{"id":"chatcmpl-102","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"1}"}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-102","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"{\\"b\\":2}"}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-102","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"tool_calls"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response_indexed_arrays) + stream = @client.chat.completions.stream(**basic_params) + completion = stream.get_final_completion + tool_calls = completion.choices.first.message.tool_calls + assert_equal(2, tool_calls.length) + assert_equal("func1", tool_calls[0].function.name) + assert_equal('{"a":1}', tool_calls[0].function.arguments) + assert_equal("func2", tool_calls[1].function.name) + assert_equal('{"b":2}', tool_calls[1].function.arguments) + + # Test 4: Nested object accumulation with recursive merging + sse_response_nested = <<~SSE + data: {"id":"chatcmpl-103","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":"{\\"data\\":{\\"nested\\":{\\"value\\":"},"finish_reason":null}]} + + data: {"id":"chatcmpl-103","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"content":"42"},"finish_reason":null}]} + + data: {"id":"chatcmpl-103","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"content":"}}}"},"finish_reason":null}]} + + data: {"id":"chatcmpl-103","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response_nested) + stream = 
@client.chat.completions.stream(**basic_params) + completion = stream.get_final_completion + assert_equal('{"data":{"nested":{"value":42}}}', completion.choices.first.message.content) + + # Test 5: Type property replacement (not accumulation) + sse_response_type_replacement = <<~SSE + data: {"id":"chatcmpl-104","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","tool_calls":[{"index":0,"id":"call_x","type":"function","function":{"name":"test","arguments":""}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-104","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"type":"function","function":{"arguments":"{\\"x\\":1}"}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-104","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"tool_calls"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response_type_replacement) + stream = @client.chat.completions.stream(**basic_params) + completion = stream.get_final_completion + tool_call = completion.choices.first.message.tool_calls.first + # Type should remain "function" (last value wins due to replacement logic) + assert_equal(:function, tool_call.type) + assert_equal('{"x":1}', tool_call.function.arguments) + + # Test 6: Multiple choices with different index values + sse_response_multiple_choices = <<~SSE + data: {"id":"chatcmpl-105","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":"Choice 0"},"finish_reason":null}]} + + data: {"id":"chatcmpl-105","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":1,"delta":{"role":"assistant","content":"Choice 1"},"finish_reason":null}]} + + data: 
{"id":"chatcmpl-105","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"content":" continued"},"finish_reason":null}]} + + data: {"id":"chatcmpl-105","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":1,"delta":{"content":" also"},"finish_reason":null}]} + + data: {"id":"chatcmpl-105","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]} + + data: {"id":"chatcmpl-105","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":1,"delta":{},"finish_reason":"stop"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response_multiple_choices) + stream = @client.chat.completions.stream(**basic_params) + completion = stream.get_final_completion + assert_equal(2, completion.choices.length) + assert_equal("Choice 0 continued", completion.choices[0].message.content) + assert_equal("Choice 1 also", completion.choices[1].message.content) + + # Test 7: Interleaved updates to same indexed tool calls + sse_response_interleaved = <<~SSE + data: {"id":"chatcmpl-106","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","tool_calls":[{"index":0,"id":"call_a","type":"function","function":{"name":"func1","arguments":""}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-106","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"id":"call_b","type":"function","function":{"name":"func2","arguments":""}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-106","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\\"x\\":"}}]},"finish_reason":null}]} + + data: 
{"id":"chatcmpl-106","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"{\\"y\\":"}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-106","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"1}"}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-106","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"2}"}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-106","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"tool_calls"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response_interleaved) + stream = @client.chat.completions.stream(**basic_params) + completion = stream.get_final_completion + tool_calls = completion.choices.first.message.tool_calls + assert_equal(2, tool_calls.length) + # Interleaved updates should accumulate properly for each index + assert_equal("func1", tool_calls[0].function.name) + assert_equal('{"x":1}', tool_calls[0].function.arguments) + assert_equal("func2", tool_calls[1].function.name) + assert_equal('{"y":2}', tool_calls[1].function.arguments) + + # Test 8: Empty deltas and nil handling + sse_response_empty_deltas = <<~SSE + data: {"id":"chatcmpl-107","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant"},"finish_reason":null}]} + + data: {"id":"chatcmpl-107","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":null}]} + + data: {"id":"chatcmpl-107","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"content":""},"finish_reason":null}]} + + data: 
{"id":"chatcmpl-107","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"content":"Final"},"finish_reason":null}]} + + data: {"id":"chatcmpl-107","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response_empty_deltas) + stream = @client.chat.completions.stream(**basic_params) + completion = stream.get_final_completion + # Empty strings should concatenate properly. + assert_equal("Final", completion.choices.first.message.content) + + # Test 9: Mixed content and tool calls in same choice + sse_response_mixed = <<~SSE + data: {"id":"chatcmpl-108","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":"Calling function"},"finish_reason":null}]} + + data: {"id":"chatcmpl-108","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"content":" now","tool_calls":[{"index":0,"id":"call_mix","type":"function","function":{"name":"mixed","arguments":"{\\"data\\":true}"}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-108","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]} + + data: [DONE] + + SSE + + stub_streaming_response(sse_response_mixed) + stream = @client.chat.completions.stream(**basic_params) + completion = stream.get_final_completion + message = completion.choices.first.message + assert_equal("Calling function now", message.content) + assert_equal(1, message.tool_calls.length) + assert_equal("mixed", message.tool_calls.first.function.name) + end + + def basic_text_sse_response + <<~SSE + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":"Hello"},"finish_reason":null}]} + + data: 
{"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"content":" there!"},"finish_reason":null}]} + + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"content":" How"},"finish_reason":null}]} + + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"content":" can"},"finish_reason":null}]} + + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"content":" I"},"finish_reason":null}]} + + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"content":" help?"},"finish_reason":null}]} + + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]} + + data: [DONE] + + SSE + end + + def completion_with_usage_sse_response + <<~SSE + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","system_fingerprint":"fp_123","choices":[{"index":0,"delta":{"role":"assistant","content":"Test"},"finish_reason":null}]} + + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"content":" response"},"finish_reason":null}]} + + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}],"usage":{"prompt_tokens":10,"completion_tokens":2,"total_tokens":12}} + + data: [DONE] + + SSE + end + + def refusal_sse_response + <<~SSE + data: 
{"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","refusal":"I cannot"},"finish_reason":null}]} + + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"refusal":" help with that"},"finish_reason":null}]} + + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]} + + data: [DONE] + + SSE + end + + def tool_calls_sse_response + <<~SSE + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","tool_calls":[{"index":0,"id":"call_123","type":"function","function":{"name":"get_weather","arguments":""}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\\"location\\":"}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\\"Paris\\","}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\\"units\\":\\"celsius\\"}"}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"tool_calls"}]} + + data: [DONE] + + SSE + end + + def azure_compatibility_sse_response + <<~SSE + data: {"id":"chatcmpl-123","object":"","created":1234567890,"model":"gpt-4o-mini","choices":[]} + + data: 
{"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":"Hello"},"finish_reason":null}]} + + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]} + + data: [DONE] + + SSE + end + + def tool_calls_only_sse_response + <<~SSE + data: {"id":"chatcmpl-2","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","tool_calls":[{"index":0,"id":"call_1","type":"function","function":{"name":"noop","arguments":"{}"}}]},"finish_reason":null}]} + + data: {"id":"chatcmpl-2","object":"chat.completion.chunk","created":1,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"tool_calls"}]} + + data: [DONE] + + SSE + end + + def structured_output_sse_response + <<~SSE + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":"{\\"name\\":"},"finish_reason":null}]} + + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"content":"\\"John\\","},"finish_reason":null}]} + + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"content":"\\"age\\":30}"},"finish_reason":null}]} + + data: {"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]} + + data: [DONE] + + SSE + end +end diff --git a/test/openai/resources/responses/streaming_test.rb b/test/openai/resources/responses/streaming_test.rb index 7bc282b1..fbf1ef51 100644 --- a/test/openai/resources/responses/streaming_test.rb +++ b/test/openai/resources/responses/streaming_test.rb @@ -127,6 
+127,29 @@ def test_get_output_text_with_no_text_content assert_equal("", text) end + class WeatherModel < OpenAI::BaseModel + required :location, String + required :temperature, Integer + end + + def test_function_tool_parsed_field + stub_streaming_response(function_calling_sse_response) + + stream = @client.responses.stream(**function_tool_params) + response = stream.get_final_response + + function_call = response.output.find { |o| o.is_a?(OpenAI::Models::Responses::ResponseFunctionToolCall) } + + assert_pattern do + function_call => OpenAI::Models::Responses::ResponseFunctionToolCall[ + parsed: WeatherModel[ + location: "San Francisco", + temperature: 72 + ] + ] + end + end + def test_early_stream_close stub_streaming_response(basic_text_sse_response) @@ -141,11 +164,6 @@ def test_early_stream_close refute(events.any? { |e| e.type == :"response.completed" }) end - class WeatherModel < OpenAI::BaseModel - required :location, String - required :temperature, Integer - end - def test_structured_output_streaming stub_streaming_response(structured_output_sse_response) @@ -198,8 +216,8 @@ def test_function_calling_streaming assert_function_delta_events( events, - expected_deltas: ['{"location":"', "San Francisco", '"}'], - expected_snapshot: '{"location":"San Francisco"}' + expected_deltas: ['{"location":"', "San Francisco", '","temperature":', "72}"], + expected_snapshot: '{"location":"San Francisco","temperature":72}' ) end @@ -418,7 +436,7 @@ def function_tool_params basic_params.merge( tools: [ { - type: "function", + type: :function, function: { name: "get_weather", description: "Get weather for a location", @@ -594,16 +612,19 @@ def function_calling_sse_response data: {"type":"response.function_call_arguments.delta","sequence_number":4,"item_id":"item_003","output_index":0,"delta":"San Francisco"} event: response.function_call_arguments.delta - data: 
{"type":"response.function_call_arguments.delta","sequence_number":5,"item_id":"item_003","output_index":0,"delta":"\\"}"} + data: {"type":"response.function_call_arguments.delta","sequence_number":5,"item_id":"item_003","output_index":0,"delta":"\\",\\"temperature\\":"} + + event: response.function_call_arguments.delta + data: {"type":"response.function_call_arguments.delta","sequence_number":6,"item_id":"item_003","output_index":0,"delta":"72}"} event: response.function_call_arguments.done - data: {"type":"response.function_call_arguments.done","sequence_number":6,"item_id":"item_003","output_index":0,"arguments":"{\\"location\\":\\"San Francisco\\"}"} + data: {"type":"response.function_call_arguments.done","sequence_number":7,"item_id":"item_003","output_index":0,"arguments":"{\\"location\\":\\"San Francisco\\",\\"temperature\\":72}"} event: response.output_item.done - data: {"type":"response.output_item.done","sequence_number":7,"response_id":"msg_003","item_id":"item_003","output_index":0,"item":{"id":"item_003","object":"realtime.item","type":"function_call","status":"completed","name":"get_weather","arguments":"{\\"location\\":\\"San Francisco\\"}","call_id":"call_001"}} + data: {"type":"response.output_item.done","sequence_number":8,"response_id":"msg_003","item_id":"item_003","output_index":0,"item":{"id":"item_003","object":"realtime.item","type":"function_call","status":"completed","name":"get_weather","arguments":"{\\"location\\":\\"San Francisco\\",\\"temperature\\":72}","call_id":"call_001"}} event: response.completed - data: {"type":"response.completed","sequence_number":8,"response":{"id":"msg_003","object":"realtime.response","status":"completed","status_details":null,"output":[{"id":"item_003","object":"realtime.item","type":"function_call","status":"completed","name":"get_weather","arguments":"{\\"location\\":\\"San Francisco\\"}","call_id":"call_001"}],"usage":{"total_tokens":20,"input_tokens":10,"output_tokens":10},"metadata":null}} + data: 
{"type":"response.completed","sequence_number":9,"response":{"id":"msg_003","object":"realtime.response","status":"completed","status_details":null,"output":[{"id":"item_003","object":"realtime.item","type":"function_call","status":"completed","name":"get_weather","arguments":"{\\"location\\":\\"San Francisco\\",\\"temperature\\":72}","call_id":"call_001"}],"usage":{"total_tokens":20,"input_tokens":10,"output_tokens":10},"metadata":null}} SSE end