@@ -5,93 +5,52 @@ module Providers
55 module Gemini
66 # Streaming methods for the Gemini API implementation
77 module Streaming
# Relative SSE endpoint for streaming generation on the configured model.
# Returns a String path; alt=sse requests server-sent-event framing.
def stream_url
  format('models/%s:streamGenerateContent?alt=sse', @model)
end
4011
# Assemble a streaming Chunk from one parsed SSE payload, delegating each
# attribute to its dedicated extractor.
def build_chunk(data)
  attributes = {
    role: :assistant,
    model_id: extract_model_id(data),
    content: extract_content(data),
    input_tokens: extract_input_tokens(data),
    output_tokens: extract_output_tokens(data),
    tool_calls: extract_tool_calls(data)
  }
  Chunk.new(**attributes)
end
4322
4423 private
4524
# Gemini reports the serving model under the 'modelVersion' key;
# returns nil when the payload omits it.
def extract_model_id(data)
  data.fetch('modelVersion', nil)
end
5428
# Concatenated text of the first candidate's parts, or nil when the payload
# carries no candidates, no parts, or no text parts (e.g. a pure functionCall).
def extract_content(data)
  candidates = data['candidates']
  return nil unless candidates&.any?

  parts = candidates.first.dig('content', 'parts')
  return nil unless parts

  # Keep only parts that actually carry text before joining them.
  text_chunks = parts.select { |part| part['text'] }.map { |part| part['text'] }
  text_chunks.join unless text_chunks.empty?
end
# Prompt token count from usage metadata; nil when usage is absent.
def extract_input_tokens(data)
  usage = data['usageMetadata']
  usage && usage['promptTokenCount']
end
# Candidate (completion) token count from usage metadata; nil when absent.
def extract_output_tokens(data)
  usage = data['usageMetadata']
  usage && usage['candidatesTokenCount']
end
# Parse a raw streaming error payload into a [code, message] pair.
#
# data - String body of the error event.
#
# Returns [Integer-or-nil code, String-or-nil message]. Falls back to a
# 500-style tuple when the payload is not valid JSON.
def parse_streaming_error(data)
  error_data = JSON.parse(data)
  # dig avoids an unrescued NoMethodError when the payload parses as JSON
  # but has no top-level 'error' object — the old ['error']['code'] crashed.
  [error_data.dig('error', 'code'), error_data.dig('error', 'message')]
rescue JSON::ParserError => e
  RubyLLM.logger.debug "Failed to parse streaming error: #{e.message}"
  [500, "Failed to parse error: #{data}"]
end
9655 end
9756 end
0 commit comments