@@ -595,14 +595,19 @@ def call_google_gemini_api(prompt_text, api_key, model_name, client=None, google
             contents.append(f)
             console_log(f"DEBUG API: Added file to contents: {getattr(f, 'display_name', getattr(f, 'name', 'unknown'))}", "INFO")
     else:
-        console_log("DEBUG API: google_file_objects is None or empty!", "WARN")
+        # Logic: If no images, we're likely processing text which is appended next.
+        console_log("DEBUG API: google_file_objects is empty. Proceeding with prompt text only.", "INFO")
+
     contents.append(prompt_text)
-    console_log(f"DEBUG API: Total contents list size: {len(contents)} (should be {(len(google_file_objects) if google_file_objects else 0) + 1})", "INFO")
+    console_log(f"DEBUG API: Total contents list size: {len(contents)} (Prompt + {(len(google_file_objects) if google_file_objects else 0)} files)", "INFO")
 
     # Configuration
+    enable_thinking = kwargs.get('enable_thinking', False)
+    thinking_config = types.ThinkingConfig(include_thoughts=True) if enable_thinking else None
+
     config = types.GenerateContentConfig(
         safety_settings=safety_settings,
-        thinking_config=types.ThinkingConfig(include_thoughts=True)
+        thinking_config=thinking_config
     )
 
     if stream_output:
@@ -957,6 +962,7 @@ def process_file_group(filepaths_group, api_key, engine, user_prompt, model_name
             stream_output=kwargs['stream_output'],
             safety_settings=kwargs.get('safety_settings'),
             enable_web_search=kwargs.get('enable_web_search', False),
+            enable_thinking=kwargs.get('enable_thinking', False),
             clean_markdown=kwargs.get('clean_markdown', True)  # Pass clean setting
         )
 
@@ -1195,6 +1201,7 @@ def __init__(self, initial_api_key, command_line_files, args):
         self.output_ext_var = tk.StringVar(value=getattr(self.args, 'output_ext', ''))
         # --- UPDATE: STREAM ENABLED BY DEFAULT ---
         self.stream_var = tk.BooleanVar(value=getattr(self.args, 'stream', True))
+        self.thinking_var = tk.BooleanVar(value=False)  # New for Gemini (v25.8)
         self.add_filename_var = tk.BooleanVar(value=getattr(self.args, 'add_filename_to_prompt', False))
         self.group_files_var = tk.BooleanVar(value=False)
         self.group_size_var = tk.IntVar(value=3)
@@ -1297,8 +1304,12 @@ def create_widgets(self):
         ttk.Label(tab_ai, text="Provider:").grid(row=0, column=0, sticky="w")
         ttk.Combobox(tab_ai, textvariable=self.engine_var, values=['google', 'ollama', 'lmstudio'], state="readonly").grid(row=0, column=1, sticky="ew", padx=5)
         ttk.Label(tab_ai, text="Model:").grid(row=1, column=0, sticky="w", pady=5)
-        self.model_combo = ttk.Combobox(tab_ai, textvariable=self.model_var, state="disabled", width=50); self.model_combo.grid(row=1, column=1, sticky="ew", padx=5, pady=5)
-        ttk.Checkbutton(tab_ai, text="Append Filename to Prompt", variable=self.add_filename_var).grid(row=2, column=0, columnspan=2, sticky="w")
+        self.model_combo = ttk.Combobox(tab_ai, textvariable=self.model_var, state="disabled", width=50)
+        self.model_combo.grid(row=1, column=1, sticky="ew", padx=5, pady=5)
+
+        ttk.Checkbutton(tab_ai, text="Stream Output", variable=self.stream_var).grid(row=2, column=0, columnspan=2, sticky="w")
+        ttk.Checkbutton(tab_ai, text="Enable Thinking (Gemini)", variable=self.thinking_var).grid(row=3, column=0, columnspan=2, sticky="w")
+        ttk.Checkbutton(tab_ai, text="Add Filename to Prompt", variable=self.add_filename_var).grid(row=4, column=0, columnspan=2, sticky="w")
 
         self.ollama_search_var = tk.BooleanVar(value=False)
         self.ollama_search_check = ttk.Checkbutton(tab_ai, text="Enable Web Search (Ollama Only)", variable=self.ollama_search_var)
@@ -1482,6 +1493,7 @@ def set_var(var, key, default=None):
         set_var(self.output_ext_var, 'output_extension', "")
         set_var(self.overwrite_var, 'overwrite_original', False)
         set_var(self.stream_var, 'stream_output', True)  # Default True
+        set_var(self.thinking_var, 'enable_thinking', False)  # Default False
         set_var(self.add_filename_var, 'add_filename_to_prompt', False)
         set_var(self.ollama_search_var, 'enable_web_search', False)
 
@@ -1554,7 +1566,8 @@ def get_current_settings_dict(self):
             'img_max_dim': self.img_max_dim_var.get(),
             'force_conversion': self.force_conversion_var.get(),
             'save_img_to_output': self.save_img_to_output_var.get(),
-            'rename_mode': self.rename_mode_var.get()
+            'rename_mode': self.rename_mode_var.get(),
+            'enable_thinking': self.thinking_var.get()
         }
 
     def save_current_preset(self):
@@ -1993,6 +2006,7 @@ def _worker(self):
 
             self.result_queue.put({'job_id': jid, 'status': 'Running'})
             params = job.copy(); params.pop('job_id')
+            params['enable_thinking'] = self.thinking_var.get()  # Pass GUI state
 
             if self.global_runtime_overrides:
                 params['engine'] = self.global_runtime_overrides['engine']
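
Taken together, the hunks thread a single enable_thinking boolean from the new Tkinter checkbox (self.thinking_var) through the job kwargs into call_google_gemini_api, where it decides whether a ThinkingConfig is attached to the request. Below is a minimal standalone sketch of that final step against the google-genai SDK; the build_config helper, model name, prompt, and API key are illustrative placeholders, not part of the commit.

# Minimal sketch (not from the PR): how the enable_thinking flag maps onto the
# google-genai SDK objects used in the diff. Model name and prompt are placeholders.
from google import genai
from google.genai import types

def build_config(enable_thinking: bool, safety_settings=None):
    # Mirror the diff: only attach a ThinkingConfig when the checkbox is on.
    thinking_config = types.ThinkingConfig(include_thoughts=True) if enable_thinking else None
    return types.GenerateContentConfig(
        safety_settings=safety_settings,
        thinking_config=thinking_config,
    )

client = genai.Client(api_key="YOUR_API_KEY")   # placeholder key
response = client.models.generate_content(
    model="gemini-2.5-flash",                   # assumed model name
    contents=["Summarize the attached file."],
    config=build_config(enable_thinking=True),
)
print(response.text)

Keeping thinking_config as None when the box is unchecked preserves the previous default behavior for models that reject the setting, which appears to be the motivation for making it opt-in.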