Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .release-please-manifest.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
{
".": "0.26.0"
".": "0.27.0"
}
24 changes: 24 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,29 @@
# Changelog

## 0.27.0 (2025-09-26)

Full Changelog: [v0.26.0...v0.27.0](https://github.com/openai/openai-ruby/compare/v0.26.0...v0.27.0)

### Features

* chat completion streaming helpers ([#828](https://github.com/openai/openai-ruby/issues/828)) ([6e98424](https://github.com/openai/openai-ruby/commit/6e9842485e819876dd6b78107fa45f1a5da67e4f))


### Bug Fixes

* **internal:** use null byte as file separator in the fast formatting script ([151ffe1](https://github.com/openai/openai-ruby/commit/151ffe10c9dc8d5edaf46de2a1c6b6e6fda80034))
* shorten multipart boundary sep to less than RFC specified max length ([d7770d1](https://github.com/openai/openai-ruby/commit/d7770d10ee3b093d8e2464b79e0e12be3a9d2beb))


### Performance Improvements

* faster code formatting ([67da711](https://github.com/openai/openai-ruby/commit/67da71139e5b572c97539299c39bae04c1d569fd))


### Chores

* allow fast-format to use bsd sed as well ([66ac913](https://github.com/openai/openai-ruby/commit/66ac913d195d8b5a5c4474ded88a5f9dad13b7b6))

## 0.26.0 (2025-09-23)

Full Changelog: [v0.25.1...v0.26.0](https://github.com/openai/openai-ruby/compare/v0.25.1...v0.26.0)
Expand Down
2 changes: 1 addition & 1 deletion Gemfile.lock
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ GIT
PATH
remote: .
specs:
openai (0.26.0)
openai (0.27.0)
connection_pool

GEM
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application
<!-- x-release-please-start-version -->

```ruby
gem "openai", "~> 0.26.0"
gem "openai", "~> 0.27.0"
```

<!-- x-release-please-end -->
Expand Down
28 changes: 20 additions & 8 deletions Rakefile
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@ tapioca = "sorbet/tapioca"
examples = "examples"
ignore_file = ".ignore"

FILES_ENV = "FORMAT_FILE"

CLEAN.push(*%w[.idea/ .ruby-lsp/ .yardoc/ doc/], *FileList["*.gem"], ignore_file)

CLOBBER.push(*%w[sorbet/rbi/annotations/ sorbet/rbi/gems/], tapioca)
Expand All @@ -38,6 +40,14 @@ end
xargs = %w[xargs --no-run-if-empty --null --max-procs=0 --max-args=300 --]
ruby_opt = {"RUBYOPT" => [ENV["RUBYOPT"], "--encoding=UTF-8"].compact.join(" ")}

filtered = ->(ext, dirs) do
if ENV.key?(FILES_ENV)
%w[sed -E -n -e] << "/\\.#{ext}$/p" << "--" << ENV.fetch(FILES_ENV)
else
(%w[find] + dirs + %w[-type f -and -name]) << "*.#{ext}" << "-print0"
end
end

desc("Lint `*.rb(i)`")
multitask(:"lint:rubocop") do
find = %w[find ./lib ./test ./rbi ./examples -type f -and ( -name *.rb -or -name *.rbi ) -print0]
Expand All @@ -52,24 +62,26 @@ multitask(:"lint:rubocop") do
sh("#{find.shelljoin} | #{lint.shelljoin}")
end

norm_lines = %w[tr -- \n \0].shelljoin

desc("Format `*.rb`")
multitask(:"format:rb") do
# while `syntax_tree` is much faster than `rubocop`, `rubocop` is the only formatter with full syntax support
find = %w[find ./lib ./test ./examples -type f -and -name *.rb -print0]
files = filtered["rb", %w[./lib ./test ./examples]]
fmt = xargs + %w[rubocop --fail-level F --autocorrect --format simple --]
sh("#{find.shelljoin} | #{fmt.shelljoin}")
sh("#{files.shelljoin} | #{norm_lines} | #{fmt.shelljoin}")
end

desc("Format `*.rbi`")
multitask(:"format:rbi") do
find = %w[find ./rbi -type f -and -name *.rbi -print0]
files = filtered["rbi", %w[./rbi]]
fmt = xargs + %w[stree write --]
sh(ruby_opt, "#{find.shelljoin} | #{fmt.shelljoin}")
sh(ruby_opt, "#{files.shelljoin} | #{norm_lines} | #{fmt.shelljoin}")
end

desc("Format `*.rbs`")
multitask(:"format:rbs") do
find = %w[find ./sig -type f -name *.rbs -print0]
files = filtered["rbs", %w[./sig]]
inplace = /darwin|bsd/ =~ RUBY_PLATFORM ? ["-i", ""] : %w[-i]
uuid = SecureRandom.uuid

Expand Down Expand Up @@ -98,13 +110,13 @@ multitask(:"format:rbs") do
success = false

# transform class aliases to type aliases, which syntax tree has no trouble with
sh("#{find.shelljoin} | #{pre.shelljoin}")
sh("#{files.shelljoin} | #{norm_lines} | #{pre.shelljoin}")
# run syntax tree to format `*.rbs` files
sh(ruby_opt, "#{find.shelljoin} | #{fmt.shelljoin}") do
sh(ruby_opt, "#{files.shelljoin} | #{norm_lines} | #{fmt.shelljoin}") do
success = _1
end
# transform type aliases back to class aliases
sh("#{find.shelljoin} | #{pst.shelljoin}")
sh("#{files.shelljoin} | #{norm_lines} | #{pst.shelljoin}")

# always run post-processing to remove comment marker
fail unless success
Expand Down
23 changes: 23 additions & 0 deletions examples/chat/streaming_basic.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
#!/usr/bin/env ruby
# frozen_string_literal: true

require_relative "../../lib/openai"

# Reads the API key from the `OPENAI_API_KEY` environment variable.
client = OpenAI::Client.new

haiku_stream = client.chat.completions.stream(
  model: "gpt-4o-mini",
  messages: [{role: :user, content: "Write a creative haiku about the ocean."}]
)

# Echo each content fragment as it arrives; finish the line once the
# content portion of the stream is done.
haiku_stream.each do |event|
  if event.is_a?(OpenAI::Streaming::ChatContentDeltaEvent)
    print(event.delta)
  elsif event.is_a?(OpenAI::Streaming::ChatContentDoneEvent)
    puts
  end
end
58 changes: 58 additions & 0 deletions examples/chat/streaming_follow_up.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
#!/usr/bin/env ruby
# frozen_string_literal: true

require_relative "../../lib/openai"

# Reads the API key from the `OPENAI_API_KEY` environment variable.
client = OpenAI::Client.new

# Demonstrates carrying a conversation across two streamed chat completions
# by resending the accumulated turns with each request.
#
# 1. Stream the assistant's reply to an initial user turn.
conversation = [
  {role: :user, content: "Tell me a short story about a robot. Stop after 2 sentences."}
]

puts "First streamed completion:"
reply_parts = []

first_stream = client.chat.completions.stream(
  model: "gpt-4o-mini",
  messages: conversation
)

first_stream.each do |event|
  if event.is_a?(OpenAI::Streaming::ChatContentDeltaEvent)
    # Collect the fragments so the finished reply can be resent later.
    reply_parts << event.delta
    print(event.delta)
  elsif event.is_a?(OpenAI::Streaming::ChatContentDoneEvent)
    puts
  end
end

# 2. Start a new streamed completion that includes the assistant turn we just
#    collected, plus a follow-up user instruction.
conversation << {role: :assistant, content: reply_parts.join}
conversation << {role: :user, content: "Continue the story with 2 more sentences while keeping the same style."}

puts
puts "Second streamed completion (with prior turns included):"

second_stream = client.chat.completions.stream(
  model: "gpt-4o-mini",
  messages: conversation
)

second_stream.each do |event|
  if event.is_a?(OpenAI::Streaming::ChatContentDeltaEvent)
    print(event.delta)
  elsif event.is_a?(OpenAI::Streaming::ChatContentDoneEvent)
    puts
  end
end

puts
puts "Done. The second stream is a new completion that used the prior turns as context."
32 changes: 32 additions & 0 deletions examples/chat/streaming_logprobs.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
#!/usr/bin/env ruby
# frozen_string_literal: true

require_relative "../../lib/openai"

# Reads the API key from the `OPENAI_API_KEY` environment variable.
client = OpenAI::Client.new

stream = client.chat.completions.stream(
  model: "gpt-4o-mini",
  logprobs: true,
  top_logprobs: 3,
  messages: [{role: :user, content: "Finish the sentence: The capital of France is"}]
)

stream.each do |event|
  case event
  when OpenAI::Streaming::ChatContentDeltaEvent
    print(event.delta)
  when OpenAI::Streaming::ChatLogprobsContentDeltaEvent
    # Show the alternatives reported for the newest token in this delta.
    newest = event.content.last
    next if newest.nil?

    summary = newest.top_logprobs
                    .map { |alt| "#{alt.token}=#{format('%.2f', alt.logprob)}" }
                    .join(", ")
    puts("\nlogprobs: [#{summary}]")
  when OpenAI::Streaming::ChatLogprobsContentDoneEvent
    puts("\n--- logprobs collection finished (#{event.content.length} tokens) ---")
  end
end
43 changes: 43 additions & 0 deletions examples/chat/streaming_multi_choice.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
#!/usr/bin/env ruby
# frozen_string_literal: true

require_relative "../../lib/openai"

# Reads the API key from the `OPENAI_API_KEY` environment variable.
client = OpenAI::Client.new

stream = client.chat.completions.stream(
  model: "gpt-4o-mini",
  n: 2,
  messages: [{role: :user, content: "Give me two short taglines for a beach resort."}]
)

contents = {}
finished = {}

stream.each do |event|
  next unless event.is_a?(OpenAI::Streaming::ChatChunkEvent)

  # Each chunk's snapshot carries the accumulated state of every choice.
  event.snapshot.choices.each_with_index do |choice, idx|
    contents[idx] = choice.message.content if choice.message.content

    if choice.finish_reason && !finished[idx]
      finished[idx] = true
      # Announce the full text for this choice the moment it completes.
      puts("[choice #{idx}] complete:")
      puts(contents[idx])
      puts("--- choice #{idx} done ---")
      puts
    end
  end
end

puts("------ final choices ------")
contents.keys.sort.each { |idx| puts("[#{idx}] #{contents[idx]}") }
46 changes: 46 additions & 0 deletions examples/chat/streaming_structured_outputs.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
#!/usr/bin/env ruby
# frozen_string_literal: true

require_relative "../../lib/openai"

# Schema for one worked step of the solution.
class Step < OpenAI::BaseModel
  required :explanation, String
  required :output, String
end

# Schema for the complete structured answer the model must produce.
class MathResponse < OpenAI::BaseModel
  required :steps, OpenAI::ArrayOf[Step]
  required :final_answer, String
end

# Reads the API key from the `OPENAI_API_KEY` environment variable.
client = OpenAI::Client.new

stream = client.chat.completions.stream(
  model: "gpt-4o-mini",
  response_format: MathResponse,
  messages: [{role: :user, content: "solve 8x + 31 = 2, show all steps"}]
)

stream.each do |event|
  if event.is_a?(OpenAI::Streaming::ChatContentDeltaEvent)
    print(event.delta)
  elsif event.is_a?(OpenAI::Streaming::ChatContentDoneEvent)
    puts
    puts("--- parsed object ---")
    pp(event.parsed)
  end
end

final_completion = stream.get_final_completion

puts
puts("----- parsed outputs from final response -----")
final_completion.choices.each do |choice|
  # `parsed` is an instance of `MathResponse`.
  pp(choice.message.parsed)
end
19 changes: 19 additions & 0 deletions examples/chat/streaming_text.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
#!/usr/bin/env ruby
# frozen_string_literal: true

require_relative "../../lib/openai"

# Reads the API key from the `OPENAI_API_KEY` environment variable.
client = OpenAI::Client.new

fact_stream = client.chat.completions.stream(
  model: "gpt-4o-mini",
  messages: [{role: :user, content: "List three fun facts about dolphins."}]
)

# `text` yields only the content fragments, skipping other event types.
fact_stream.text.each { |chunk| print(chunk) }
puts
32 changes: 32 additions & 0 deletions examples/chat/streaming_tools.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
#!/usr/bin/env ruby
# frozen_string_literal: true

require_relative "../../lib/openai"

# Tool-call argument schema: the model fills in `location`.
class GetWeather < OpenAI::BaseModel
  required :location, String
end

# Reads the API key from the `OPENAI_API_KEY` environment variable.
client = OpenAI::Client.new

stream = client.chat.completions.stream(
  model: "gpt-4o-mini",
  tools: [GetWeather],
  messages: [{role: :user, content: "Call get_weather with location San Francisco in JSON."}]
)

stream.each do |event|
  if event.is_a?(OpenAI::Streaming::ChatFunctionToolCallArgumentsDeltaEvent)
    # Arguments stream in incrementally; `parsed` reflects what is valid so far.
    puts("delta: #{event.arguments_delta}")
    pp(event.parsed)
  elsif event.is_a?(OpenAI::Streaming::ChatFunctionToolCallArgumentsDoneEvent)
    puts("--- Tool call finalized ---")
    puts("name: #{event.name}")
    puts("args: #{event.arguments}")
    pp(event.parsed)
  end
end
Loading