@@ -170,35 +170,25 @@ def __init__(self, vector_store: VectorStore = None, model_name: str = None,
170170         self.model_name = model_name
171171         # skip_analysis parameter kept for backward compatibility but no longer used
172172
173-        # Check if this is an Ollama model
174-        self.is_ollama = model_name and (model_name.startswith("ollama:") or "Ollama - " in model_name)
173+        # Check if this is an Ollama model (anything not Mistral is considered Ollama)
174+        self.is_ollama = not (model_name and "mistral" in model_name.lower())
175175
176176         if self.is_ollama:
177-            # Extract the actual model name from the prefix
178-            # If model_name contains 'ollama:' prefix, remove it
179-            # If model_name is from gradio interface (e.g., "Ollama - llama3"), extract just the model name
180-            if model_name.startswith("ollama:"):
181-                ollama_model_name = model_name.replace("ollama:", "")
182-            elif "Ollama - " in model_name:
183-                ollama_model_name = model_name.replace("Ollama - ", "").strip()
184-            else:
185-                ollama_model_name = model_name
186-
187177             # Add :latest suffix if not present
188-            if not ollama_model_name.endswith(":latest"):
189-                ollama_model_name = f"{ollama_model_name}:latest"
178+            if not model_name.endswith(":latest"):
179+                model_name = f"{model_name}:latest"
190180
191181             # Load Ollama model
192182             print("\nLoading Ollama model...")
193-            print(f"Model: {ollama_model_name}")
183+            print(f"Model: {model_name}")
194184             print("Note: Make sure Ollama is running on your system.")
195185
196186             # Initialize Ollama model handler
197-            self.ollama_handler = OllamaModelHandler(ollama_model_name)
187+            self.ollama_handler = OllamaModelHandler(model_name)
198188
199189             # Create pipeline-like interface
200190             self.pipeline = self.ollama_handler
201-            print(f"Using Ollama model: {ollama_model_name}")
191+            print(f"Using Ollama model: {model_name}")
202192         else:
203193             # Only initialize Mistral if no model is specified
204194             if not model_name:
0 commit comments