Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .release-please-manifest.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
{
".": "0.23.3"
".": "0.24.0"
}
6 changes: 3 additions & 3 deletions .stats.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
configured_endpoints: 118
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-d30ff992a48873c1466c49f3c01f2ec8933faebff23424748f8d056065b1bcef.yml
openapi_spec_hash: e933ec43b46f45c348adb78840e5808d
config_hash: bf45940f0a7805b4ec2017eecdd36893
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-380330a93b5d010391ca3b36ea193c5353b0dfdf2ddd02789ef84a84ce427e82.yml
openapi_spec_hash: 859703234259ecdd2a3c6f4de88eb504
config_hash: b619b45c1e7facf819f902dee8fa4f97
9 changes: 9 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,14 @@
# Changelog

## 0.24.0 (2025-09-17)

Full Changelog: [v0.23.3...v0.24.0](https://github.com/openai/openai-ruby/compare/v0.23.3...v0.24.0)

### Features

* **api:** type updates for conversations, reasoning_effort and results for evals ([ee17642](https://github.com/openai/openai-ruby/commit/ee17642d7319dacb933a41ae9f1edae2a200762f))
* expose response headers for both streams and errors ([a158fd6](https://github.com/openai/openai-ruby/commit/a158fd66b22a5586f4a45301ff96e40f8d52fe8c))

## 0.23.3 (2025-09-15)

Full Changelog: [v0.23.2...v0.23.3](https://github.com/openai/openai-ruby/compare/v0.23.2...v0.23.3)
Expand Down
2 changes: 1 addition & 1 deletion Gemfile.lock
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ GIT
PATH
remote: .
specs:
openai (0.23.3)
openai (0.24.0)
connection_pool

GEM
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application
<!-- x-release-please-start-version -->

```ruby
gem "openai", "~> 0.23.3"
gem "openai", "~> 0.24.0"
```

<!-- x-release-please-end -->
Expand Down
15 changes: 5 additions & 10 deletions lib/openai.rb
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,11 @@
require_relative "openai/structured_output"
require_relative "openai/models/reasoning_effort"
require_relative "openai/models/chat/chat_completion_message"
require_relative "openai/models/responses/response_input_file"
require_relative "openai/models/responses/response_input_image"
require_relative "openai/models/responses/response_input_text"
require_relative "openai/models/responses/response_output_text"
require_relative "openai/models/responses/response_output_refusal"
require_relative "openai/models/graders/score_model_grader"
require_relative "openai/models/graders/python_grader"
require_relative "openai/models/graders/text_similarity_grader"
Expand Down Expand Up @@ -248,7 +253,6 @@
require_relative "openai/models/containers/file_retrieve_response"
require_relative "openai/models/containers/files/content_retrieve_params"
require_relative "openai/models/conversations/computer_screenshot_content"
require_relative "openai/models/conversations/container_file_citation_body"
require_relative "openai/models/conversations/conversation"
require_relative "openai/models/conversations/conversation_create_params"
require_relative "openai/models/conversations/conversation_deleted"
Expand All @@ -258,22 +262,18 @@
require_relative "openai/models/conversations/conversation_item_list"
require_relative "openai/models/conversations/conversation_retrieve_params"
require_relative "openai/models/conversations/conversation_update_params"
require_relative "openai/models/conversations/file_citation_body"
require_relative "openai/models/conversations/input_file_content"
require_relative "openai/models/conversations/input_image_content"
require_relative "openai/models/conversations/input_text_content"
require_relative "openai/models/conversations/item_create_params"
require_relative "openai/models/conversations/item_delete_params"
require_relative "openai/models/conversations/item_list_params"
require_relative "openai/models/conversations/item_retrieve_params"
require_relative "openai/models/conversations/lob_prob"
require_relative "openai/models/conversations/message"
require_relative "openai/models/conversations/output_text_content"
require_relative "openai/models/conversations/refusal_content"
require_relative "openai/models/conversations/summary_text_content"
require_relative "openai/models/conversations/text_content"
require_relative "openai/models/conversations/top_log_prob"
require_relative "openai/models/conversations/url_citation_body"
require_relative "openai/models/create_embedding_response"
require_relative "openai/models/custom_tool_input_format"
require_relative "openai/models/embedding"
Expand Down Expand Up @@ -549,12 +549,9 @@
require_relative "openai/models/responses/response_input"
require_relative "openai/models/responses/response_input_audio"
require_relative "openai/models/responses/response_input_content"
require_relative "openai/models/responses/response_input_file"
require_relative "openai/models/responses/response_input_image"
require_relative "openai/models/responses/response_input_item"
require_relative "openai/models/responses/response_input_message_content_list"
require_relative "openai/models/responses/response_input_message_item"
require_relative "openai/models/responses/response_input_text"
require_relative "openai/models/responses/response_item"
require_relative "openai/models/responses/response_item_list"
require_relative "openai/models/responses/response_mcp_call_arguments_delta_event"
Expand All @@ -570,8 +567,6 @@
require_relative "openai/models/responses/response_output_item_added_event"
require_relative "openai/models/responses/response_output_item_done_event"
require_relative "openai/models/responses/response_output_message"
require_relative "openai/models/responses/response_output_refusal"
require_relative "openai/models/responses/response_output_text"
require_relative "openai/models/responses/response_output_text_annotation_added_event"
require_relative "openai/models/responses/response_prompt"
require_relative "openai/models/responses/response_queued_event"
Expand Down
36 changes: 25 additions & 11 deletions lib/openai/errors.rb
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,9 @@ class APIError < OpenAI::Errors::Error
# @return [Integer, nil]
attr_accessor :status

# @return [Hash{String=>String}, nil]
attr_accessor :headers

# @return [Object, nil]
attr_accessor :body

Expand All @@ -59,13 +62,15 @@ class APIError < OpenAI::Errors::Error
#
# @param url [URI::Generic]
# @param status [Integer, nil]
# @param headers [Hash{String=>String}, nil]
# @param body [Object, nil]
# @param request [nil]
# @param response [nil]
# @param message [String, nil]
def initialize(url:, status: nil, body: nil, request: nil, response: nil, message: nil)
def initialize(url:, status: nil, headers: nil, body: nil, request: nil, response: nil, message: nil)
@url = url
@status = status
@headers = headers
@body = body
@request = request
@response = response
Expand Down Expand Up @@ -98,13 +103,15 @@ class APIConnectionError < OpenAI::Errors::APIError
#
# @param url [URI::Generic]
# @param status [nil]
# @param headers [Hash{String=>String}, nil]
# @param body [nil]
# @param request [nil]
# @param response [nil]
# @param message [String, nil]
def initialize(
url:,
status: nil,
headers: nil,
body: nil,
request: nil,
response: nil,
Expand All @@ -119,13 +126,15 @@ class APITimeoutError < OpenAI::Errors::APIConnectionError
#
# @param url [URI::Generic]
# @param status [nil]
# @param headers [Hash{String=>String}, nil]
# @param body [nil]
# @param request [nil]
# @param response [nil]
# @param message [String, nil]
def initialize(
url:,
status: nil,
headers: nil,
body: nil,
request: nil,
response: nil,
Expand All @@ -140,21 +149,24 @@ class APIStatusError < OpenAI::Errors::APIError
#
# @param url [URI::Generic]
# @param status [Integer]
# @param headers [Hash{String=>String}, nil]
# @param body [Object, nil]
# @param request [nil]
# @param response [nil]
# @param message [String, nil]
#
# @return [self]
def self.for(url:, status:, body:, request:, response:, message: nil)
kwargs = {
url: url,
status: status,
body: body,
request: request,
response: response,
message: message
}
def self.for(url:, status:, headers:, body:, request:, response:, message: nil)
kwargs =
{
url: url,
status: status,
headers: headers,
body: body,
request: request,
response: response,
message: message
}

case status
in 400
Expand Down Expand Up @@ -198,18 +210,20 @@ def self.for(url:, status:, body:, request:, response:, message: nil)
#
# @param url [URI::Generic]
# @param status [Integer]
# @param headers [Hash{String=>String}, nil]
# @param body [Object, nil]
# @param request [nil]
# @param response [nil]
# @param message [String, nil]
def initialize(url:, status:, body:, request:, response:, message: nil)
def initialize(url:, status:, headers:, body:, request:, response:, message: nil)
message ||= OpenAI::Internal::Util.dig(body, :message) { {url: url.to_s, status: status, body: body} }
@code = OpenAI::Internal::Type::Converter.coerce(String, OpenAI::Internal::Util.dig(body, :code))
@param = OpenAI::Internal::Type::Converter.coerce(String, OpenAI::Internal::Util.dig(body, :param))
@type = OpenAI::Internal::Type::Converter.coerce(String, OpenAI::Internal::Util.dig(body, :type))
super(
url: url,
status: status,
headers: headers,
body: body,
request: request,
response: response,
Expand Down
2 changes: 1 addition & 1 deletion lib/openai/internal/conversation_cursor_page.rb
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ def auto_paging_each(&blk)
#
# @param client [OpenAI::Internal::Transport::BaseClient]
# @param req [Hash{Symbol=>Object}]
# @param headers [Hash{String=>String}, Net::HTTPHeader]
# @param headers [Hash{String=>String}]
# @param page_data [Hash{Symbol=>Object}]
def initialize(client:, req:, headers:, page_data:)
super
Expand Down
2 changes: 1 addition & 1 deletion lib/openai/internal/cursor_page.rb
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ def auto_paging_each(&blk)
#
# @param client [OpenAI::Internal::Transport::BaseClient]
# @param req [Hash{Symbol=>Object}]
# @param headers [Hash{String=>String}, Net::HTTPHeader]
# @param headers [Hash{String=>String}]
# @param page_data [Hash{Symbol=>Object}]
def initialize(client:, req:, headers:, page_data:)
super
Expand Down
2 changes: 1 addition & 1 deletion lib/openai/internal/page.rb
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ def auto_paging_each(&blk)
#
# @param client [OpenAI::Internal::Transport::BaseClient]
# @param req [Hash{Symbol=>Object}]
# @param headers [Hash{String=>String}, Net::HTTPHeader]
# @param headers [Hash{String=>String}]
# @param page_data [Array<Object>]
def initialize(client:, req:, headers:, page_data:)
super
Expand Down
1 change: 1 addition & 0 deletions lib/openai/internal/stream.rb
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ class Stream
err = OpenAI::Errors::APIStatusError.for(
url: @url,
status: @status,
headers: @headers,
body: data,
request: nil,
response: @response,
Expand Down
18 changes: 11 additions & 7 deletions lib/openai/internal/transport/base_client.rb
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ def validate!(req)
# @api private
#
# @param status [Integer]
# @param headers [Hash{String=>String}, Net::HTTPHeader]
# @param headers [Hash{String=>String}]
#
# @return [Boolean]
def should_retry?(status, headers:)
Expand Down Expand Up @@ -85,7 +85,7 @@ def should_retry?(status, headers:)
#
# @param status [Integer]
#
# @param response_headers [Hash{String=>String}, Net::HTTPHeader]
# @param response_headers [Hash{String=>String}]
#
# @return [Hash{Symbol=>Object}]
def follow_redirect(request, status:, response_headers:)
Expand Down Expand Up @@ -378,6 +378,7 @@ def send_request(request, redirect_count:, retry_count:, send_retry_header:)
rescue OpenAI::Errors::APIConnectionError => e
status = e
end
headers = OpenAI::Internal::Util.normalized_headers(response&.each_header&.to_h)

case status
in ..299
Expand All @@ -390,7 +391,7 @@ def send_request(request, redirect_count:, retry_count:, send_retry_header:)
in 300..399
self.class.reap_connection!(status, stream: stream)

request = self.class.follow_redirect(request, status: status, response_headers: response)
request = self.class.follow_redirect(request, status: status, response_headers: headers)
send_request(
request,
redirect_count: redirect_count + 1,
Expand All @@ -399,16 +400,17 @@ def send_request(request, redirect_count:, retry_count:, send_retry_header:)
)
in OpenAI::Errors::APIConnectionError if retry_count >= max_retries
raise status
in (400..) if retry_count >= max_retries || !self.class.should_retry?(status, headers: response)
in (400..) if retry_count >= max_retries || !self.class.should_retry?(status, headers: headers)
decoded = Kernel.then do
OpenAI::Internal::Util.decode_content(response, stream: stream, suppress_error: true)
OpenAI::Internal::Util.decode_content(headers, stream: stream, suppress_error: true)
ensure
self.class.reap_connection!(status, stream: stream)
end

raise OpenAI::Errors::APIStatusError.for(
url: url,
status: status,
headers: headers,
body: decoded,
request: nil,
response: response
Expand Down Expand Up @@ -485,19 +487,21 @@ def request(req)
send_retry_header: send_retry_header
)

decoded = OpenAI::Internal::Util.decode_content(response, stream: stream)
headers = OpenAI::Internal::Util.normalized_headers(response.each_header.to_h)
decoded = OpenAI::Internal::Util.decode_content(headers, stream: stream)
case req
in {stream: Class => st}
st.new(
model: model,
url: url,
status: status,
headers: headers,
response: response,
unwrap: unwrap,
stream: decoded
)
in {page: Class => page}
page.new(client: self, req: req, headers: response, page_data: decoded)
page.new(client: self, req: req, headers: headers, page_data: decoded)
else
unwrapped = OpenAI::Internal::Util.dig(decoded, unwrap)
OpenAI::Internal::Type::Converter.coerce(model, unwrapped)
Expand Down
2 changes: 1 addition & 1 deletion lib/openai/internal/type/base_page.rb
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ def to_enum = super(:auto_paging_each)
#
# @param client [OpenAI::Internal::Transport::BaseClient]
# @param req [Hash{Symbol=>Object}]
# @param headers [Hash{String=>String}, Net::HTTPHeader]
# @param headers [Hash{String=>String}]
# @param page_data [Object]
def initialize(client:, req:, headers:, page_data:)
@client = client
Expand Down
10 changes: 9 additions & 1 deletion lib/openai/internal/type/base_stream.rb
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,12 @@ class << self
def defer_closing(stream) = ->(_id) { OpenAI::Internal::Util.close_fused!(stream) }
end

# @return [Integer]
attr_reader :status

# @return [Hash{String=>String}]
attr_reader :headers

# @api public
#
# @return [void]
Expand Down Expand Up @@ -63,13 +69,15 @@ def to_enum = @iterator
# @param model [Class, OpenAI::Internal::Type::Converter]
# @param url [URI::Generic]
# @param status [Integer]
# @param headers [Hash{String=>String}]
# @param response [Net::HTTPResponse]
# @param unwrap [Symbol, Integer, Array<Symbol, Integer>, Proc]
# @param stream [Enumerable<Object>]
def initialize(model:, url:, status:, response:, unwrap:, stream:)
def initialize(model:, url:, status:, headers:, response:, unwrap:, stream:)
@model = model
@url = url
@status = status
@headers = headers
@response = response
@unwrap = unwrap
@stream = stream
Expand Down
2 changes: 1 addition & 1 deletion lib/openai/internal/util.rb
Original file line number Diff line number Diff line change
Expand Up @@ -647,7 +647,7 @@ def force_charset!(content_type, text:)
#
# Assumes each chunk in stream has `Encoding::BINARY`.
#
# @param headers [Hash{String=>String}, Net::HTTPHeader]
# @param headers [Hash{String=>String}]
# @param stream [Enumerable<String>]
# @param suppress_error [Boolean]
#
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,8 @@ class ComputerScreenshotContent < OpenAI::Internal::Type::BaseModel
# Some parameter documentation has been truncated, see
# {OpenAI::Models::Conversations::ComputerScreenshotContent} for more details.
#
# A screenshot of a computer.
#
# @param file_id [String, nil] The identifier of an uploaded file that contains the screenshot.
#
# @param image_url [String, nil] The URL of the screenshot image.
Expand Down
Loading