From 0e24b3e0a574de5c0544067c53b9e693e4cec3b1 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 27 May 2025 15:34:35 +0000 Subject: [PATCH 1/5] fix: sorbet types for enums, and make tapioca detection ignore `tapioca dsl` --- lib/openai.rb | 4 +++- lib/openai/internal/util.rb | 6 +++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/lib/openai.rb b/lib/openai.rb index b5aa2ba6..296a5f3d 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -16,7 +16,9 @@ require "uri" # We already ship the preferred sorbet manifests in the package itself. # `tapioca` currently does not offer us a way to opt out of unnecessary compilation. -if Object.const_defined?(:Tapioca) && caller.chain([$PROGRAM_NAME]).chain(ARGV).grep(/tapioca/) +if Object.const_defined?(:Tapioca) && + caller.chain([$PROGRAM_NAME]).chain(ARGV).any?(/tapioca/) && + ARGV.none?(/dsl/) return end diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb index 9e11d6ed..eb5d1ffc 100644 --- a/lib/openai/internal/util.rb +++ b/lib/openai/internal/util.rb @@ -875,8 +875,12 @@ def to_sorbet_type(type) case type in OpenAI::Internal::Util::SorbetRuntimeSupport type.to_sorbet_type - else + in Class | Module type + in true | false + T::Boolean + else + type.class end end end From 4ce753088e18a3331fccf6608889243809ce187b Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 29 May 2025 15:58:00 +0000 Subject: [PATCH 2/5] chore: deprecate Assistants API --- .stats.yml | 2 +- lib/openai/resources/beta/threads.rb | 17 +++++++++++++++-- lib/openai/resources/beta/threads/messages.rb | 11 +++++++++++ lib/openai/resources/beta/threads/runs.rb | 17 +++++++++++++++++ lib/openai/resources/beta/threads/runs/steps.rb | 5 +++++ 5 files changed, 49 insertions(+), 3 deletions(-) diff --git a/.stats.yml b/.stats.yml index 57774fe0..dcba0d15 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-fc64d7c2c8f51f750813375356c3f3fdfc7fc1b1b34f19c20a5410279d445d37.yml openapi_spec_hash: 618285fc70199ee32b9ebe4bf72f7e4c -config_hash: 535b6e5f26a295d609b259c8cb8f656c +config_hash: 3b590818075ca4b54949578b97494525 diff --git a/lib/openai/resources/beta/threads.rb b/lib/openai/resources/beta/threads.rb index 43c6986e..a4a00fe1 100644 --- a/lib/openai/resources/beta/threads.rb +++ b/lib/openai/resources/beta/threads.rb @@ -3,6 +3,7 @@ module OpenAI module Resources class Beta + # @deprecated The Assistants API is deprecated in favor of the Responses API class Threads # @return [OpenAI::Resources::Beta::Threads::Runs] attr_reader :runs @@ -10,6 +11,8 @@ class Threads # @return [OpenAI::Resources::Beta::Threads::Messages] attr_reader :messages + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::ThreadCreateParams} for more details. # @@ -39,6 +42,8 @@ def create(params = {}) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Retrieves a thread. 
# # @overload retrieve(thread_id, request_options: {}) @@ -59,6 +64,8 @@ def retrieve(thread_id, params = {}) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::ThreadUpdateParams} for more details. # @@ -88,6 +95,8 @@ def update(thread_id, params = {}) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Delete a thread. # # @overload delete(thread_id, request_options: {}) @@ -108,6 +117,8 @@ def delete(thread_id, params = {}) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # See {OpenAI::Resources::Beta::Threads#stream_raw} for streaming counterpart. # # Some parameter documentations has been truncated, see @@ -166,11 +177,13 @@ def create_and_run(params) options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end - + def stream raise NotImplementedError.new("higher level helpers are coming soon!") end - + + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # See {OpenAI::Resources::Beta::Threads#create_and_run} for non-streaming # counterpart. # diff --git a/lib/openai/resources/beta/threads/messages.rb b/lib/openai/resources/beta/threads/messages.rb index fbc2fcb8..50bafc68 100644 --- a/lib/openai/resources/beta/threads/messages.rb +++ b/lib/openai/resources/beta/threads/messages.rb @@ -4,7 +4,10 @@ module OpenAI module Resources class Beta class Threads + # @deprecated The Assistants API is deprecated in favor of the Responses API class Messages + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::Threads::MessageCreateParams} for more details. # @@ -38,6 +41,8 @@ def create(thread_id, params) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::Threads::MessageRetrieveParams} for more details. # @@ -68,6 +73,8 @@ def retrieve(message_id, params) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::Threads::MessageUpdateParams} for more details. # @@ -101,6 +108,8 @@ def update(message_id, params) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::Threads::MessageListParams} for more details. # @@ -137,6 +146,8 @@ def list(thread_id, params = {}) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Deletes a message. # # @overload delete(message_id, thread_id:, request_options: {}) diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb index 50128b5a..6c1d4b54 100644 --- a/lib/openai/resources/beta/threads/runs.rb +++ b/lib/openai/resources/beta/threads/runs.rb @@ -4,10 +4,13 @@ module OpenAI module Resources class Beta class Threads + # @deprecated The Assistants API is deprecated in favor of the Responses API class Runs # @return [OpenAI::Resources::Beta::Threads::Runs::Steps] attr_reader :steps + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # See {OpenAI::Resources::Beta::Threads::Runs#create_stream_raw} for streaming # counterpart. 
# @@ -76,6 +79,8 @@ def create(thread_id, params) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # See {OpenAI::Resources::Beta::Threads::Runs#create} for non-streaming # counterpart. # @@ -147,6 +152,8 @@ def create_stream_raw(thread_id, params) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::Threads::RunRetrieveParams} for more details. # @@ -177,6 +184,8 @@ def retrieve(run_id, params) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::Threads::RunUpdateParams} for more details. # @@ -210,6 +219,8 @@ def update(run_id, params) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::Threads::RunListParams} for more details. # @@ -244,6 +255,8 @@ def list(thread_id, params = {}) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Cancels a run that is `in_progress`. # # @overload cancel(run_id, thread_id:, request_options: {}) @@ -271,6 +284,8 @@ def cancel(run_id, params) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_stream_raw} for # streaming counterpart. # @@ -314,6 +329,8 @@ def submit_tool_outputs(run_id, params) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs} for # non-streaming counterpart. # diff --git a/lib/openai/resources/beta/threads/runs/steps.rb b/lib/openai/resources/beta/threads/runs/steps.rb index 6accaeb6..669ce368 100644 --- a/lib/openai/resources/beta/threads/runs/steps.rb +++ b/lib/openai/resources/beta/threads/runs/steps.rb @@ -5,7 +5,10 @@ module Resources class Beta class Threads class Runs + # @deprecated The Assistants API is deprecated in favor of the Responses API class Steps + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::Threads::Runs::StepRetrieveParams} for more details. # @@ -45,6 +48,8 @@ def retrieve(step_id, params) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::Threads::Runs::StepListParams} for more details. 
# From 214e516f286a026e5b040ffd76b930cad7d5eabf Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 28 May 2025 20:47:27 +0000 Subject: [PATCH 3/5] feat(api): Config update for pakrym-stream-param --- .stats.yml | 6 +- .../models/responses/response_output_text.rb | 69 +++++++++- .../responses/response_retrieve_params.rb | 12 +- lib/openai/resources/responses.rb | 48 ++++++- .../models/responses/response_output_text.rbi | 118 +++++++++++++++++- .../responses/response_retrieve_params.rbi | 11 ++ rbi/openai/resources/responses.rbi | 40 ++++++ .../models/responses/response_output_text.rbs | 68 +++++++++- .../responses/response_retrieve_params.rbs | 11 +- sig/openai/resources/responses.rbs | 8 ++ 10 files changed, 381 insertions(+), 10 deletions(-) diff --git a/.stats.yml b/.stats.yml index dcba0d15..4aa085f5 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-fc64d7c2c8f51f750813375356c3f3fdfc7fc1b1b34f19c20a5410279d445d37.yml -openapi_spec_hash: 618285fc70199ee32b9ebe4bf72f7e4c -config_hash: 3b590818075ca4b54949578b97494525 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-d4bcffecf0cdadf746faa6708ed1ec81fac451f9b857deabbab26f0a343b9314.yml +openapi_spec_hash: 7c54a18b4381248bda7cc34c52142615 +config_hash: 2102e4b25bbcab5d32d5ffa5d34daa0c diff --git a/lib/openai/models/responses/response_output_text.rb b/lib/openai/models/responses/response_output_text.rb index 7d4005e3..79440d4d 100644 --- a/lib/openai/models/responses/response_output_text.rb +++ b/lib/openai/models/responses/response_output_text.rb @@ -31,13 +31,23 @@ class ResponseOutputText < OpenAI::Internal::Type::BaseModel # @return [Symbol, :output_text] required :type, const: :output_text - # @!method initialize(annotations:, text:, type: :output_text) + # @!attribute logprobs + # + # @return [Array, nil] + optional :logprobs, + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseOutputText::Logprob] + } + + # @!method initialize(annotations:, text:, logprobs: nil, type: :output_text) # A text output from the model. # # @param annotations [Array] The annotations of the text output. # # @param text [String] The text output from the model. # + # @param logprobs [Array] + # # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. # A citation to a file. @@ -165,6 +175,63 @@ class FilePath < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath)] end + + class Logprob < OpenAI::Internal::Type::BaseModel + # @!attribute token + # + # @return [String] + required :token, String + + # @!attribute bytes + # + # @return [Array] + required :bytes, OpenAI::Internal::Type::ArrayOf[Integer] + + # @!attribute logprob + # + # @return [Float] + required :logprob, Float + + # @!attribute top_logprobs + # + # @return [Array] + required :top_logprobs, + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob] + } + + # @!method initialize(token:, bytes:, logprob:, top_logprobs:) + # The log probability of a token. 
+ # + # @param token [String] + # @param bytes [Array] + # @param logprob [Float] + # @param top_logprobs [Array] + + class TopLogprob < OpenAI::Internal::Type::BaseModel + # @!attribute token + # + # @return [String] + required :token, String + + # @!attribute bytes + # + # @return [Array] + required :bytes, OpenAI::Internal::Type::ArrayOf[Integer] + + # @!attribute logprob + # + # @return [Float] + required :logprob, Float + + # @!method initialize(token:, bytes:, logprob:) + # The top log probability of a token. + # + # @param token [String] + # @param bytes [Array] + # @param logprob [Float] + end + end end end end diff --git a/lib/openai/models/responses/response_retrieve_params.rb b/lib/openai/models/responses/response_retrieve_params.rb index 8063503f..979fe0b2 100644 --- a/lib/openai/models/responses/response_retrieve_params.rb +++ b/lib/openai/models/responses/response_retrieve_params.rb @@ -4,6 +4,8 @@ module OpenAI module Models module Responses # @see OpenAI::Resources::Responses#retrieve + # + # @see OpenAI::Resources::Responses#retrieve_streaming class ResponseRetrieveParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -15,12 +17,20 @@ class ResponseRetrieveParams < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Responses::ResponseIncludable] } - # @!method initialize(include: nil, request_options: {}) + # @!attribute starting_after + # The sequence number of the event after which to start streaming. + # + # @return [Integer, nil] + optional :starting_after, Integer + + # @!method initialize(include: nil, starting_after: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseRetrieveParams} for more details. # # @param include [Array] Additional fields to include in the response. See the `include` # + # @param starting_after [Integer] The sequence number of the event after which to start streaming. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 84ab1476..11143033 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -242,17 +242,21 @@ def stream_raw(params) ) end + # See {OpenAI::Resources::Responses#retrieve_streaming} for streaming counterpart. + # # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseRetrieveParams} for more details. # # Retrieves a model response with the given ID. # - # @overload retrieve(response_id, include: nil, request_options: {}) + # @overload retrieve(response_id, include: nil, starting_after: nil, request_options: {}) # # @param response_id [String] The ID of the response to retrieve. # # @param include [Array] Additional fields to include in the response. See the `include` # + # @param starting_after [Integer] The sequence number of the event after which to start streaming. 
+ # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Responses::Response] @@ -260,6 +264,10 @@ def stream_raw(params) # @see OpenAI::Models::Responses::ResponseRetrieveParams def retrieve(response_id, params = {}) parsed, options = OpenAI::Responses::ResponseRetrieveParams.dump_request(params) + if parsed[:stream] + message = "Please use `#retrieve_streaming` for the streaming use case." + raise ArgumentError.new(message) + end @client.request( method: :get, path: ["responses/%1$s", response_id], @@ -269,6 +277,44 @@ def retrieve(response_id, params = {}) ) end + # See {OpenAI::Resources::Responses#retrieve} for non-streaming counterpart. + # + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseRetrieveParams} for more details. + # + # Retrieves a model response with the given ID. + # + # @overload retrieve_streaming(response_id, include: nil, starting_after: nil, request_options: {}) + # + # @param response_id [String] The ID of the response to retrieve. + # + # @param include [Array] Additional fields to include in the response. See the `include` + # + # @param starting_after [Integer] The sequence number of the event after which to start streaming. + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Internal::Stream] + # + # @see OpenAI::Models::Responses::ResponseRetrieveParams + def retrieve_streaming(response_id, params = {}) + parsed, options = OpenAI::Responses::ResponseRetrieveParams.dump_request(params) + unless parsed.fetch(:stream, true) + message = "Please use `#retrieve` for the non-streaming use case." + raise ArgumentError.new(message) + end + parsed.store(:stream, true) + @client.request( + method: :get, + path: ["responses/%1$s", response_id], + query: parsed, + headers: {"accept" => "text/event-stream"}, + stream: OpenAI::Internal::Stream, + model: OpenAI::Responses::ResponseStreamEvent, + options: options + ) + end + # Deletes a model response with the given ID. # # @overload delete(response_id, request_options: {}) diff --git a/rbi/openai/models/responses/response_output_text.rbi b/rbi/openai/models/responses/response_output_text.rbi index fc5841dc..7c212ca9 100644 --- a/rbi/openai/models/responses/response_output_text.rbi +++ b/rbi/openai/models/responses/response_output_text.rbi @@ -38,6 +38,21 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type + sig do + returns( + T.nilable(T::Array[OpenAI::Responses::ResponseOutputText::Logprob]) + ) + end + attr_reader :logprobs + + sig do + params( + logprobs: + T::Array[OpenAI::Responses::ResponseOutputText::Logprob::OrHash] + ).void + end + attr_writer :logprobs + # A text output from the model. sig do params( @@ -50,6 +65,8 @@ module OpenAI ) ], text: String, + logprobs: + T::Array[OpenAI::Responses::ResponseOutputText::Logprob::OrHash], type: Symbol ).returns(T.attached_class) end @@ -58,6 +75,7 @@ module OpenAI annotations:, # The text output from the model. text:, + logprobs: nil, # The type of the output text. Always `output_text`. 
type: :output_text ) @@ -75,7 +93,8 @@ module OpenAI ) ], text: String, - type: Symbol + type: Symbol, + logprobs: T::Array[OpenAI::Responses::ResponseOutputText::Logprob] } ) end @@ -265,6 +284,103 @@ module OpenAI def self.variants end end + + class Logprob < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputText::Logprob, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(String) } + attr_accessor :token + + sig { returns(T::Array[Integer]) } + attr_accessor :bytes + + sig { returns(Float) } + attr_accessor :logprob + + sig do + returns( + T::Array[ + OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob + ] + ) + end + attr_accessor :top_logprobs + + # The log probability of a token. + sig do + params( + token: String, + bytes: T::Array[Integer], + logprob: Float, + top_logprobs: + T::Array[ + OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob::OrHash + ] + ).returns(T.attached_class) + end + def self.new(token:, bytes:, logprob:, top_logprobs:) + end + + sig do + override.returns( + { + token: String, + bytes: T::Array[Integer], + logprob: Float, + top_logprobs: + T::Array[ + OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob + ] + } + ) + end + def to_hash + end + + class TopLogprob < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(String) } + attr_accessor :token + + sig { returns(T::Array[Integer]) } + attr_accessor :bytes + + sig { returns(Float) } + attr_accessor :logprob + + # The top log probability of a token. + sig do + params( + token: String, + bytes: T::Array[Integer], + logprob: Float + ).returns(T.attached_class) + end + def self.new(token:, bytes:, logprob:) + end + + sig do + override.returns( + { token: String, bytes: T::Array[Integer], logprob: Float } + ) + end + def to_hash + end + end + end end end end diff --git a/rbi/openai/models/responses/response_retrieve_params.rbi b/rbi/openai/models/responses/response_retrieve_params.rbi index c1e80237..c25abeb6 100644 --- a/rbi/openai/models/responses/response_retrieve_params.rbi +++ b/rbi/openai/models/responses/response_retrieve_params.rbi @@ -31,9 +31,17 @@ module OpenAI end attr_writer :include + # The sequence number of the event after which to start streaming. + sig { returns(T.nilable(Integer)) } + attr_reader :starting_after + + sig { params(starting_after: Integer).void } + attr_writer :starting_after + sig do params( include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + starting_after: Integer, request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) end @@ -41,6 +49,8 @@ module OpenAI # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. include: nil, + # The sequence number of the event after which to start streaming. + starting_after: nil, request_options: {} ) end @@ -50,6 +60,7 @@ module OpenAI { include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + starting_after: Integer, request_options: OpenAI::RequestOptions } ) diff --git a/rbi/openai/resources/responses.rbi b/rbi/openai/resources/responses.rbi index ecb2f74c..bb55b332 100644 --- a/rbi/openai/resources/responses.rbi +++ b/rbi/openai/resources/responses.rbi @@ -428,11 +428,15 @@ module OpenAI ) end + # See {OpenAI::Resources::Responses#retrieve_streaming} for streaming counterpart. 
+ # # Retrieves a model response with the given ID. sig do params( response_id: String, include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + starting_after: Integer, + stream: T.noreturn, request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::Responses::Response) end @@ -442,6 +446,42 @@ module OpenAI # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. include: nil, + # The sequence number of the event after which to start streaming. + starting_after: nil, + # There is no need to provide `stream:`. Instead, use `#retrieve_streaming` or + # `#retrieve` for streaming and non-streaming use cases, respectively. + stream: false, + request_options: {} + ) + end + + # See {OpenAI::Resources::Responses#retrieve} for non-streaming counterpart. + # + # Retrieves a model response with the given ID. + sig do + params( + response_id: String, + include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + starting_after: Integer, + stream: T.noreturn, + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::Stream[ + OpenAI::Responses::ResponseStreamEvent::Variants + ] + ) + end + def retrieve_streaming( + # The ID of the response to retrieve. + response_id, + # Additional fields to include in the response. See the `include` parameter for + # Response creation above for more information. + include: nil, + # The sequence number of the event after which to start streaming. + starting_after: nil, + # There is no need to provide `stream:`. Instead, use `#retrieve_streaming` or + # `#retrieve` for streaming and non-streaming use cases, respectively. + stream: true, request_options: {} ) end diff --git a/sig/openai/models/responses/response_output_text.rbs b/sig/openai/models/responses/response_output_text.rbs index d374deb4..d29dc9ec 100644 --- a/sig/openai/models/responses/response_output_text.rbs +++ b/sig/openai/models/responses/response_output_text.rbs @@ -5,7 +5,8 @@ module OpenAI { annotations: ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation], text: String, - type: :output_text + type: :output_text, + logprobs: ::Array[OpenAI::Responses::ResponseOutputText::Logprob] } class ResponseOutputText < OpenAI::Internal::Type::BaseModel @@ -15,16 +16,24 @@ module OpenAI attr_accessor type: :output_text + attr_reader logprobs: ::Array[OpenAI::Responses::ResponseOutputText::Logprob]? 
+ + def logprobs=: ( + ::Array[OpenAI::Responses::ResponseOutputText::Logprob] + ) -> ::Array[OpenAI::Responses::ResponseOutputText::Logprob] + def initialize: ( annotations: ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation], text: String, + ?logprobs: ::Array[OpenAI::Responses::ResponseOutputText::Logprob], ?type: :output_text ) -> void def to_hash: -> { annotations: ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation], text: String, - type: :output_text + type: :output_text, + logprobs: ::Array[OpenAI::Responses::ResponseOutputText::Logprob] } type annotation = @@ -119,6 +128,61 @@ module OpenAI def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation] end + + type logprob = + { + token: String, + bytes: ::Array[Integer], + logprob: Float, + top_logprobs: ::Array[OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob] + } + + class Logprob < OpenAI::Internal::Type::BaseModel + attr_accessor token: String + + attr_accessor bytes: ::Array[Integer] + + attr_accessor logprob: Float + + attr_accessor top_logprobs: ::Array[OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob] + + def initialize: ( + token: String, + bytes: ::Array[Integer], + logprob: Float, + top_logprobs: ::Array[OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob] + ) -> void + + def to_hash: -> { + token: String, + bytes: ::Array[Integer], + logprob: Float, + top_logprobs: ::Array[OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob] + } + + type top_logprob = + { token: String, bytes: ::Array[Integer], logprob: Float } + + class TopLogprob < OpenAI::Internal::Type::BaseModel + attr_accessor token: String + + attr_accessor bytes: ::Array[Integer] + + attr_accessor logprob: Float + + def initialize: ( + token: String, + bytes: ::Array[Integer], + logprob: Float + ) -> void + + def to_hash: -> { + token: String, + bytes: ::Array[Integer], + logprob: Float + } + end + end end end end diff --git a/sig/openai/models/responses/response_retrieve_params.rbs b/sig/openai/models/responses/response_retrieve_params.rbs index c90b79c2..56f3ed6a 100644 --- a/sig/openai/models/responses/response_retrieve_params.rbs +++ b/sig/openai/models/responses/response_retrieve_params.rbs @@ -2,7 +2,10 @@ module OpenAI module Models module Responses type response_retrieve_params = - { include: ::Array[OpenAI::Models::Responses::response_includable] } + { + include: ::Array[OpenAI::Models::Responses::response_includable], + starting_after: Integer + } & OpenAI::Internal::Type::request_parameters class ResponseRetrieveParams < OpenAI::Internal::Type::BaseModel @@ -15,13 +18,19 @@ module OpenAI ::Array[OpenAI::Models::Responses::response_includable] ) -> ::Array[OpenAI::Models::Responses::response_includable] + attr_reader starting_after: Integer? 
+ + def starting_after=: (Integer) -> Integer + def initialize: ( ?include: ::Array[OpenAI::Models::Responses::response_includable], + ?starting_after: Integer, ?request_options: OpenAI::request_opts ) -> void def to_hash: -> { include: ::Array[OpenAI::Models::Responses::response_includable], + starting_after: Integer, request_options: OpenAI::RequestOptions } end diff --git a/sig/openai/resources/responses.rbs b/sig/openai/resources/responses.rbs index 97ccd557..ee118696 100644 --- a/sig/openai/resources/responses.rbs +++ b/sig/openai/resources/responses.rbs @@ -52,9 +52,17 @@ module OpenAI def retrieve: ( String response_id, ?include: ::Array[OpenAI::Models::Responses::response_includable], + ?starting_after: Integer, ?request_options: OpenAI::request_opts ) -> OpenAI::Responses::Response + def retrieve_streaming: ( + String response_id, + ?include: ::Array[OpenAI::Models::Responses::response_includable], + ?starting_after: Integer, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Internal::Stream[OpenAI::Models::Responses::response_stream_event] + def delete: ( String response_id, ?request_options: OpenAI::request_opts From 2b7122ad724620269c3b403d5a584d710bed5b5c Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 29 May 2025 01:54:17 +0000 Subject: [PATCH 4/5] fix(client): return binary content from `get /containers/{container_id}/files/{file_id}/content` --- .stats.yml | 2 +- lib/openai/resources/containers/files/content.rb | 5 +++-- rbi/openai/resources/containers/files/content.rbi | 2 +- sig/openai/resources/containers/files/content.rbs | 2 +- test/openai/resources/containers/files/content_test.rb | 4 +++- 5 files changed, 9 insertions(+), 6 deletions(-) diff --git a/.stats.yml b/.stats.yml index 4aa085f5..2e733899 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-d4bcffecf0cdadf746faa6708ed1ec81fac451f9b857deabbab26f0a343b9314.yml openapi_spec_hash: 7c54a18b4381248bda7cc34c52142615 -config_hash: 2102e4b25bbcab5d32d5ffa5d34daa0c +config_hash: d23f847b9ebb3f427d0f198035bd3e9f diff --git a/lib/openai/resources/containers/files/content.rb b/lib/openai/resources/containers/files/content.rb index 3d07b16e..ba07f678 100644 --- a/lib/openai/resources/containers/files/content.rb +++ b/lib/openai/resources/containers/files/content.rb @@ -13,7 +13,7 @@ class Content # @param container_id [String] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [nil] + # @return [StringIO] # # @see OpenAI::Models::Containers::Files::ContentRetrieveParams def retrieve(file_id, params) @@ -25,7 +25,8 @@ def retrieve(file_id, params) @client.request( method: :get, path: ["containers/%1$s/files/%2$s/content", container_id, file_id], - model: NilClass, + headers: {"accept" => "application/binary"}, + model: StringIO, options: options ) end diff --git a/rbi/openai/resources/containers/files/content.rbi b/rbi/openai/resources/containers/files/content.rbi index a299fe68..bb901b30 100644 --- a/rbi/openai/resources/containers/files/content.rbi +++ b/rbi/openai/resources/containers/files/content.rbi @@ -11,7 +11,7 @@ module OpenAI file_id: String, container_id: String, request_options: OpenAI::RequestOptions::OrHash - ).void + ).returns(StringIO) end def retrieve(file_id, container_id:, request_options: {}) end diff --git a/sig/openai/resources/containers/files/content.rbs 
b/sig/openai/resources/containers/files/content.rbs index 898c9988..03e09259 100644 --- a/sig/openai/resources/containers/files/content.rbs +++ b/sig/openai/resources/containers/files/content.rbs @@ -7,7 +7,7 @@ module OpenAI String file_id, container_id: String, ?request_options: OpenAI::request_opts - ) -> nil + ) -> StringIO def initialize: (client: OpenAI::Client) -> void end diff --git a/test/openai/resources/containers/files/content_test.rb b/test/openai/resources/containers/files/content_test.rb index 994b2806..4d4252d4 100644 --- a/test/openai/resources/containers/files/content_test.rb +++ b/test/openai/resources/containers/files/content_test.rb @@ -4,10 +4,12 @@ class OpenAI::Test::Resources::Containers::Files::ContentTest < OpenAI::Test::ResourceTest def test_retrieve_required_params + skip("skipped: test server currently has no support for method content-type") + response = @openai.containers.files.content.retrieve("file_id", container_id: "container_id") assert_pattern do - response => nil + response => StringIO end end end From d9cf640d48cf4efd4449b955954033c2baa760c4 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 29 May 2025 15:58:54 +0000 Subject: [PATCH 5/5] release: 0.5.0 --- .release-please-manifest.json | 2 +- CHANGELOG.md | 19 +++++++++++++++++++ Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 5 files changed, 23 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 3e2bf498..2aca35ae 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.4.1" + ".": "0.5.0" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 437ee8b2..5f18dfc2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,24 @@ # Changelog +## 0.5.0 (2025-05-29) + +Full Changelog: [v0.4.1...v0.5.0](https://github.com/openai/openai-ruby/compare/v0.4.1...v0.5.0) + +### Features + +* **api:** Config update for pakrym-stream-param ([214e516](https://github.com/openai/openai-ruby/commit/214e516f286a026e5b040ffd76b930cad7d5eabf)) + + +### Bug Fixes + +* **client:** return binary content from `get /containers/{container_id}/files/{file_id}/content` ([2b7122a](https://github.com/openai/openai-ruby/commit/2b7122ad724620269c3b403d5a584d710bed5b5c)) +* sorbet types for enums, and make tapioca detection ignore `tapioca dsl` ([0e24b3e](https://github.com/openai/openai-ruby/commit/0e24b3e0a574de5c0544067c53b9e693e4cec3b1)) + + +### Chores + +* deprecate Assistants API ([4ce7530](https://github.com/openai/openai-ruby/commit/4ce753088e18a3331fccf6608889243809ce187b)) + ## 0.4.1 (2025-05-23) Full Changelog: [v0.4.0-beta.1...v0.4.1](https://github.com/openai/openai-ruby/compare/v0.4.0-beta.1...v0.4.1) diff --git a/Gemfile.lock b/Gemfile.lock index 64aeded6..503224bb 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . 
specs: - openai (0.4.1) + openai (0.5.0) connection_pool GEM diff --git a/README.md b/README.md index d902b49a..ebc0d58c 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.4.1" +gem "openai", "~> 0.5.0" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index a71b9348..56f83a91 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.4.1" + VERSION = "0.5.0" end
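
---

Usage sketch for the streaming retrieval added in `feat(api): Config update for pakrym-stream-param` above. This is illustrative only, not part of the patches: the response ID, the `starting_after` value, and the event handling are assumptions; the method names, the `starting_after:` parameter, and the `OpenAI::Internal::Stream` return type come from the diffs themselves.

```ruby
require "openai"

# Assumes OPENAI_API_KEY is set in the environment.
client = OpenAI::Client.new

# Non-streaming retrieval returns a single OpenAI::Responses::Response.
# Per the patch, passing `stream: true` here raises ArgumentError and
# points callers at #retrieve_streaming instead.
response = client.responses.retrieve("resp_123")

# Streaming retrieval returns an OpenAI::Internal::Stream of
# ResponseStreamEvent values; per the new ResponseRetrieveParams,
# `starting_after` resumes the event stream after the event with the
# given sequence number (both values below are placeholders).
client.responses.retrieve_streaming("resp_123", starting_after: 41).each do |event|
  puts event
end
```

The split into `#retrieve` / `#retrieve_streaming` mirrors the existing `create_and_run` / `stream_raw` pairs elsewhere in the SDK: the streaming variant forces `stream: true`, sends `accept: text/event-stream`, and wraps the response in a typed stream rather than a single model.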