Skip to content

Commit 214e516

Browse files
feat(api): Config update for pakrym-stream-param
1 parent 4ce7530 commit 214e516

File tree

10 files changed

+381
-10
lines changed

10 files changed

+381
-10
lines changed

.stats.yml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
configured_endpoints: 109
2-
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-fc64d7c2c8f51f750813375356c3f3fdfc7fc1b1b34f19c20a5410279d445d37.yml
3-
openapi_spec_hash: 618285fc70199ee32b9ebe4bf72f7e4c
4-
config_hash: 3b590818075ca4b54949578b97494525
2+
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-d4bcffecf0cdadf746faa6708ed1ec81fac451f9b857deabbab26f0a343b9314.yml
3+
openapi_spec_hash: 7c54a18b4381248bda7cc34c52142615
4+
config_hash: 2102e4b25bbcab5d32d5ffa5d34daa0c

lib/openai/models/responses/response_output_text.rb

Lines changed: 68 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,13 +31,23 @@ class ResponseOutputText < OpenAI::Internal::Type::BaseModel
3131
# @return [Symbol, :output_text]
3232
required :type, const: :output_text
3333

34-
# @!method initialize(annotations:, text:, type: :output_text)
34+
# @!attribute logprobs
35+
#
36+
# @return [Array<OpenAI::Models::Responses::ResponseOutputText::Logprob>, nil]
37+
optional :logprobs,
38+
-> {
39+
OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseOutputText::Logprob]
40+
}
41+
42+
# @!method initialize(annotations:, text:, logprobs: nil, type: :output_text)
3543
# A text output from the model.
3644
#
3745
# @param annotations [Array<OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath>] The annotations of the text output.
3846
#
3947
# @param text [String] The text output from the model.
4048
#
49+
# @param logprobs [Array<OpenAI::Models::Responses::ResponseOutputText::Logprob>]
50+
#
4151
# @param type [Symbol, :output_text] The type of the output text. Always `output_text`.
4252

4353
# A citation to a file.
@@ -165,6 +175,63 @@ class FilePath < OpenAI::Internal::Type::BaseModel
165175
# @!method self.variants
166176
# @return [Array(OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath)]
167177
end
178+
179+
class Logprob < OpenAI::Internal::Type::BaseModel
180+
# @!attribute token
181+
#
182+
# @return [String]
183+
required :token, String
184+
185+
# @!attribute bytes
186+
#
187+
# @return [Array<Integer>]
188+
required :bytes, OpenAI::Internal::Type::ArrayOf[Integer]
189+
190+
# @!attribute logprob
191+
#
192+
# @return [Float]
193+
required :logprob, Float
194+
195+
# @!attribute top_logprobs
196+
#
197+
# @return [Array<OpenAI::Models::Responses::ResponseOutputText::Logprob::TopLogprob>]
198+
required :top_logprobs,
199+
-> {
200+
OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob]
201+
}
202+
203+
# @!method initialize(token:, bytes:, logprob:, top_logprobs:)
204+
# The log probability of a token.
205+
#
206+
# @param token [String]
207+
# @param bytes [Array<Integer>]
208+
# @param logprob [Float]
209+
# @param top_logprobs [Array<OpenAI::Models::Responses::ResponseOutputText::Logprob::TopLogprob>]
210+
211+
class TopLogprob < OpenAI::Internal::Type::BaseModel
212+
# @!attribute token
213+
#
214+
# @return [String]
215+
required :token, String
216+
217+
# @!attribute bytes
218+
#
219+
# @return [Array<Integer>]
220+
required :bytes, OpenAI::Internal::Type::ArrayOf[Integer]
221+
222+
# @!attribute logprob
223+
#
224+
# @return [Float]
225+
required :logprob, Float
226+
227+
# @!method initialize(token:, bytes:, logprob:)
228+
# The top log probability of a token.
229+
#
230+
# @param token [String]
231+
# @param bytes [Array<Integer>]
232+
# @param logprob [Float]
233+
end
234+
end
168235
end
169236
end
170237
end

lib/openai/models/responses/response_retrieve_params.rb

Lines changed: 11 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,8 @@ module OpenAI
44
module Models
55
module Responses
66
# @see OpenAI::Resources::Responses#retrieve
7+
#
8+
# @see OpenAI::Resources::Responses#retrieve_streaming
79
class ResponseRetrieveParams < OpenAI::Internal::Type::BaseModel
810
extend OpenAI::Internal::Type::RequestParameters::Converter
911
include OpenAI::Internal::Type::RequestParameters
@@ -15,12 +17,20 @@ class ResponseRetrieveParams < OpenAI::Internal::Type::BaseModel
1517
# @return [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>, nil]
1618
optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Responses::ResponseIncludable] }
1719

18-
# @!method initialize(include: nil, request_options: {})
20+
# @!attribute starting_after
21+
# The sequence number of the event after which to start streaming.
22+
#
23+
# @return [Integer, nil]
24+
optional :starting_after, Integer
25+
26+
# @!method initialize(include: nil, starting_after: nil, request_options: {})
1927
# Some parameter documentations has been truncated, see
2028
# {OpenAI::Models::Responses::ResponseRetrieveParams} for more details.
2129
#
2230
# @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>] Additional fields to include in the response. See the `include`
2331
#
32+
# @param starting_after [Integer] The sequence number of the event after which to start streaming.
33+
#
2434
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
2535
end
2636
end

lib/openai/resources/responses.rb

Lines changed: 47 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -242,24 +242,32 @@ def stream_raw(params)
242242
)
243243
end
244244

245+
# See {OpenAI::Resources::Responses#retrieve_streaming} for streaming counterpart.
246+
#
245247
# Some parameter documentations has been truncated, see
246248
# {OpenAI::Models::Responses::ResponseRetrieveParams} for more details.
247249
#
248250
# Retrieves a model response with the given ID.
249251
#
250-
# @overload retrieve(response_id, include: nil, request_options: {})
252+
# @overload retrieve(response_id, include: nil, starting_after: nil, request_options: {})
251253
#
252254
# @param response_id [String] The ID of the response to retrieve.
253255
#
254256
# @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>] Additional fields to include in the response. See the `include`
255257
#
258+
# @param starting_after [Integer] The sequence number of the event after which to start streaming.
259+
#
256260
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
257261
#
258262
# @return [OpenAI::Models::Responses::Response]
259263
#
260264
# @see OpenAI::Models::Responses::ResponseRetrieveParams
261265
def retrieve(response_id, params = {})
262266
parsed, options = OpenAI::Responses::ResponseRetrieveParams.dump_request(params)
267+
if parsed[:stream]
268+
message = "Please use `#retrieve_streaming` for the streaming use case."
269+
raise ArgumentError.new(message)
270+
end
263271
@client.request(
264272
method: :get,
265273
path: ["responses/%1$s", response_id],
@@ -269,6 +277,44 @@ def retrieve(response_id, params = {})
269277
)
270278
end
271279

280+
# See {OpenAI::Resources::Responses#retrieve} for non-streaming counterpart.
281+
#
282+
# Some parameter documentations has been truncated, see
283+
# {OpenAI::Models::Responses::ResponseRetrieveParams} for more details.
284+
#
285+
# Retrieves a model response with the given ID.
286+
#
287+
# @overload retrieve_streaming(response_id, include: nil, starting_after: nil, request_options: {})
288+
#
289+
# @param response_id [String] The ID of the response to retrieve.
290+
#
291+
# @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>] Additional fields to include in the response. See the `include`
292+
#
293+
# @param starting_after [Integer] The sequence number of the event after which to start streaming.
294+
#
295+
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
296+
#
297+
# @return [OpenAI::Internal::Stream<OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, 
OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseImageGenCallCompletedEvent, OpenAI::Models::Responses::ResponseImageGenCallGeneratingEvent, OpenAI::Models::Responses::ResponseImageGenCallInProgressEvent, OpenAI::Models::Responses::ResponseImageGenCallPartialImageEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseMcpCallCompletedEvent, OpenAI::Models::Responses::ResponseMcpCallFailedEvent, OpenAI::Models::Responses::ResponseMcpCallInProgressEvent, OpenAI::Models::Responses::ResponseMcpListToolsCompletedEvent, OpenAI::Models::Responses::ResponseMcpListToolsFailedEvent, OpenAI::Models::Responses::ResponseMcpListToolsInProgressEvent, OpenAI::Models::Responses::ResponseOutputTextAnnotationAddedEvent, OpenAI::Models::Responses::ResponseQueuedEvent, OpenAI::Models::Responses::ResponseReasoningDeltaEvent, OpenAI::Models::Responses::ResponseReasoningDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryDoneEvent>]
298+
#
299+
# @see OpenAI::Models::Responses::ResponseRetrieveParams
300+
def retrieve_streaming(response_id, params = {})
301+
parsed, options = OpenAI::Responses::ResponseRetrieveParams.dump_request(params)
302+
unless parsed.fetch(:stream, true)
303+
message = "Please use `#retrieve` for the non-streaming use case."
304+
raise ArgumentError.new(message)
305+
end
306+
parsed.store(:stream, true)
307+
@client.request(
308+
method: :get,
309+
path: ["responses/%1$s", response_id],
310+
query: parsed,
311+
headers: {"accept" => "text/event-stream"},
312+
stream: OpenAI::Internal::Stream,
313+
model: OpenAI::Responses::ResponseStreamEvent,
314+
options: options
315+
)
316+
end
317+
272318
# Deletes a model response with the given ID.
273319
#
274320
# @overload delete(response_id, request_options: {})

rbi/openai/models/responses/response_output_text.rbi

Lines changed: 117 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,21 @@ module OpenAI
3838
sig { returns(Symbol) }
3939
attr_accessor :type
4040

41+
sig do
42+
returns(
43+
T.nilable(T::Array[OpenAI::Responses::ResponseOutputText::Logprob])
44+
)
45+
end
46+
attr_reader :logprobs
47+
48+
sig do
49+
params(
50+
logprobs:
51+
T::Array[OpenAI::Responses::ResponseOutputText::Logprob::OrHash]
52+
).void
53+
end
54+
attr_writer :logprobs
55+
4156
# A text output from the model.
4257
sig do
4358
params(
@@ -50,6 +65,8 @@ module OpenAI
5065
)
5166
],
5267
text: String,
68+
logprobs:
69+
T::Array[OpenAI::Responses::ResponseOutputText::Logprob::OrHash],
5370
type: Symbol
5471
).returns(T.attached_class)
5572
end
@@ -58,6 +75,7 @@ module OpenAI
5875
annotations:,
5976
# The text output from the model.
6077
text:,
78+
logprobs: nil,
6179
# The type of the output text. Always `output_text`.
6280
type: :output_text
6381
)
@@ -75,7 +93,8 @@ module OpenAI
7593
)
7694
],
7795
text: String,
78-
type: Symbol
96+
type: Symbol,
97+
logprobs: T::Array[OpenAI::Responses::ResponseOutputText::Logprob]
7998
}
8099
)
81100
end
@@ -265,6 +284,103 @@ module OpenAI
265284
def self.variants
266285
end
267286
end
287+
288+
class Logprob < OpenAI::Internal::Type::BaseModel
289+
OrHash =
290+
T.type_alias do
291+
T.any(
292+
OpenAI::Responses::ResponseOutputText::Logprob,
293+
OpenAI::Internal::AnyHash
294+
)
295+
end
296+
297+
sig { returns(String) }
298+
attr_accessor :token
299+
300+
sig { returns(T::Array[Integer]) }
301+
attr_accessor :bytes
302+
303+
sig { returns(Float) }
304+
attr_accessor :logprob
305+
306+
sig do
307+
returns(
308+
T::Array[
309+
OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob
310+
]
311+
)
312+
end
313+
attr_accessor :top_logprobs
314+
315+
# The log probability of a token.
316+
sig do
317+
params(
318+
token: String,
319+
bytes: T::Array[Integer],
320+
logprob: Float,
321+
top_logprobs:
322+
T::Array[
323+
OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob::OrHash
324+
]
325+
).returns(T.attached_class)
326+
end
327+
def self.new(token:, bytes:, logprob:, top_logprobs:)
328+
end
329+
330+
sig do
331+
override.returns(
332+
{
333+
token: String,
334+
bytes: T::Array[Integer],
335+
logprob: Float,
336+
top_logprobs:
337+
T::Array[
338+
OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob
339+
]
340+
}
341+
)
342+
end
343+
def to_hash
344+
end
345+
346+
class TopLogprob < OpenAI::Internal::Type::BaseModel
347+
OrHash =
348+
T.type_alias do
349+
T.any(
350+
OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob,
351+
OpenAI::Internal::AnyHash
352+
)
353+
end
354+
355+
sig { returns(String) }
356+
attr_accessor :token
357+
358+
sig { returns(T::Array[Integer]) }
359+
attr_accessor :bytes
360+
361+
sig { returns(Float) }
362+
attr_accessor :logprob
363+
364+
# The top log probability of a token.
365+
sig do
366+
params(
367+
token: String,
368+
bytes: T::Array[Integer],
369+
logprob: Float
370+
).returns(T.attached_class)
371+
end
372+
def self.new(token:, bytes:, logprob:)
373+
end
374+
375+
sig do
376+
override.returns(
377+
{ token: String, bytes: T::Array[Integer], logprob: Float }
378+
)
379+
end
380+
def to_hash
381+
end
382+
end
383+
end
268384
end
269385
end
270386
end

rbi/openai/models/responses/response_retrieve_params.rbi

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -31,16 +31,26 @@ module OpenAI
3131
end
3232
attr_writer :include
3333

34+
# The sequence number of the event after which to start streaming.
35+
sig { returns(T.nilable(Integer)) }
36+
attr_reader :starting_after
37+
38+
sig { params(starting_after: Integer).void }
39+
attr_writer :starting_after
40+
3441
sig do
3542
params(
3643
include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol],
44+
starting_after: Integer,
3745
request_options: OpenAI::RequestOptions::OrHash
3846
).returns(T.attached_class)
3947
end
4048
def self.new(
4149
# Additional fields to include in the response. See the `include` parameter for
4250
# Response creation above for more information.
4351
include: nil,
52+
# The sequence number of the event after which to start streaming.
53+
starting_after: nil,
4454
request_options: {}
4555
)
4656
end
@@ -50,6 +60,7 @@ module OpenAI
5060
{
5161
include:
5262
T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol],
63+
starting_after: Integer,
5364
request_options: OpenAI::RequestOptions
5465
}
5566
)

0 commit comments

Comments (0)