@@ -242,24 +242,32 @@ def stream_raw(params)
         )
       end

+      # See {OpenAI::Resources::Responses#retrieve_streaming} for streaming counterpart.
+      #
       # Some parameter documentations has been truncated, see
       # {OpenAI::Models::Responses::ResponseRetrieveParams} for more details.
       #
       # Retrieves a model response with the given ID.
       #
-      # @overload retrieve(response_id, include: nil, request_options: {})
+      # @overload retrieve(response_id, include: nil, starting_after: nil, request_options: {})
       #
       # @param response_id [String] The ID of the response to retrieve.
       #
       # @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>] Additional fields to include in the response. See the `include`
       #
+      # @param starting_after [Integer] The sequence number of the event after which to start streaming.
+      #
       # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
       #
       # @return [OpenAI::Models::Responses::Response]
       #
       # @see OpenAI::Models::Responses::ResponseRetrieveParams
       def retrieve(response_id, params = {})
         parsed, options = OpenAI::Responses::ResponseRetrieveParams.dump_request(params)
+        if parsed[:stream]
+          message = "Please use `#retrieve_streaming` for the streaming use case."
+          raise ArgumentError.new(message)
+        end
         @client.request(
           method: :get,
           path: ["responses/%1$s", response_id],
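
The hunk above adds a guard so that `#retrieve` refuses a streaming request up front. A minimal usage sketch follows; it is not part of the diff, and the client configuration and the response ID "resp_123" are assumptions.

# Hypothetical usage sketch (not from the diff): client setup and "resp_123"
# are placeholders.
require "openai"

client = OpenAI::Client.new # picks up OPENAI_API_KEY from the environment

# Non-streaming retrieval returns a parsed OpenAI::Models::Responses::Response.
response = client.responses.retrieve("resp_123")
puts response.id

# If the dumped params carry `stream: true`, the new guard raises:
#   ArgumentError: Please use `#retrieve_streaming` for the streaming use case.
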
@@ -269,6 +277,44 @@ def retrieve(response_id, params = {})
         )
       end

+      # See {OpenAI::Resources::Responses#retrieve} for non-streaming counterpart.
+      #
+      # Some parameter documentations has been truncated, see
+      # {OpenAI::Models::Responses::ResponseRetrieveParams} for more details.
+      #
+      # Retrieves a model response with the given ID.
+      #
+      # @overload retrieve_streaming(response_id, include: nil, starting_after: nil, request_options: {})
+      #
+      # @param response_id [String] The ID of the response to retrieve.
+      #
+      # @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>] Additional fields to include in the response. See the `include`
+      #
+      # @param starting_after [Integer] The sequence number of the event after which to start streaming.
+      #
+      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
+      #
+      # @return [OpenAI::Internal::Stream<OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseImageGenCallCompletedEvent, OpenAI::Models::Responses::ResponseImageGenCallGeneratingEvent, OpenAI::Models::Responses::ResponseImageGenCallInProgressEvent, OpenAI::Models::Responses::ResponseImageGenCallPartialImageEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseMcpCallCompletedEvent, OpenAI::Models::Responses::ResponseMcpCallFailedEvent, OpenAI::Models::Responses::ResponseMcpCallInProgressEvent, OpenAI::Models::Responses::ResponseMcpListToolsCompletedEvent, OpenAI::Models::Responses::ResponseMcpListToolsFailedEvent, OpenAI::Models::Responses::ResponseMcpListToolsInProgressEvent, OpenAI::Models::Responses::ResponseOutputTextAnnotationAddedEvent, OpenAI::Models::Responses::ResponseQueuedEvent, OpenAI::Models::Responses::ResponseReasoningDeltaEvent, OpenAI::Models::Responses::ResponseReasoningDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryDoneEvent>]
+      #
+      # @see OpenAI::Models::Responses::ResponseRetrieveParams
+      def retrieve_streaming(response_id, params = {})
+        parsed, options = OpenAI::Responses::ResponseRetrieveParams.dump_request(params)
+        unless parsed.fetch(:stream, true)
+          message = "Please use `#retrieve` for the non-streaming use case."
+          raise ArgumentError.new(message)
+        end
+        parsed.store(:stream, true)
+        @client.request(
+          method: :get,
+          path: ["responses/%1$s", response_id],
+          query: parsed,
+          headers: {"accept" => "text/event-stream"},
+          stream: OpenAI::Internal::Stream,
+          model: OpenAI::Responses::ResponseStreamEvent,
+          options: options
+        )
+      end
+
       # Deletes a model response with the given ID.
       #
       # @overload delete(response_id, request_options: {})
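
A hedged consumption sketch for the new `#retrieve_streaming` method: the client, the response ID, and the `starting_after` value are placeholders, not values from the diff. `OpenAI::Internal::Stream` is enumerable, so the returned stream can be iterated event by event and matched against the event classes listed in the `@return` tag above.

# Hypothetical usage sketch; "resp_123" and starting_after: 5 are placeholders.
require "openai"

client = OpenAI::Client.new

# Resume streaming the response's events after sequence number 5.
stream = client.responses.retrieve_streaming("resp_123", starting_after: 5)

stream.each do |event|
  case event
  when OpenAI::Models::Responses::ResponseTextDeltaEvent
    print event.delta # incremental output text
  when OpenAI::Models::Responses::ResponseCompletedEvent
    puts "\nfinished: #{event.response.id}"
  end
end
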