1717from ghidra .util .exception import CancelledException
1818from ghidra .util .task import TaskMonitor
1919
20+
2021class Config :
2122 SCRIPT_DIR = os .path .dirname (os .path .realpath (__file__ ))
2223 CONFIG_FILE_PATH = os .path .join (SCRIPT_DIR + "/ghidrollama_utils" , "ghidrollama_config.json" )
@@ -89,22 +90,25 @@ def select_model(scheme, host, port):
8990 Makes a request to the Ollama API to fetch a list of installed models, prompts user to select which model to use.
9091 Requires a valid hostname/ip to be set first.
9192 """
92-
93- url = "{}://{}:{}/api/tags" .format (scheme , host , port )
93+
94+ # Check if the server is LMStudio or Ollama
95+ url = "{}://{}:{}/{}" .format (scheme , host , port , ("api/tags" if CONFIG .server_type == "ollama" else "v1/models" ))
96+
9497 choice = None
9598 try :
9699 model_list_response = urllib2 .urlopen (url )
97100 data = json .load (model_list_response )
98101
99102 model_names = []
100- for model in data ['models' ]:
101- model_names .append (model ['name' ])
103+ for model in data ['models' if CONFIG . server_type == "ollama" else 'data' ]:
104+ model_names .append (model ['name' if CONFIG . server_type == "ollama" else 'id' ])
102105
103106 if len (model_names ) == 0 :
104107 print ("No models found. Did you pull models via the Ollama CLI?" )
105108 return None
106-
107- choice = askChoice ("GhidrOllama" , "Please choose the model you want to use:" , model_names , "Model Selection" )
109+
110+ choice = askChoice ("GhidrOllama" , "Please select a model:" , model_names , "Model Selection" )
111+ print ("Selected model: " + choice )
108112
109113 except urllib2 .HTTPError as e :
110114 print ("HTTP Error {}: {}" .format (e .code , e .reason ))
@@ -126,6 +130,7 @@ def valid(self):
126130 """
127131
128132 c = self .config
133+
129134 try :
130135 if c ["host" ] == None or c ["port" ] == None or c ["model" ] == None or c ["scheme" ] == None or c ["first_run" ] == None or c ["set_comments" ] == None or c ["auto_rename" ] == None :
131136 return False
@@ -164,6 +169,22 @@ def reconfigure(self, monitor):
164169 """
165170 Guide the user through setting new configuration values.
166171 """
172+
173+ # Get server type
174+ monitor .setMessage ("Waiting for server type select..." )
175+ try :
176+ server_type = askChoice ("GhidrOllama" , "Please choose the server type:" , ["ollama" , "lmstudio" ], "Server Type Selection" )
177+ except CancelledException :
178+ return False
179+ print ("Selected server type: " + server_type )
180+ if server_type == None :
181+ return False
182+
183+ # Update self.server_type immediately so that Config.select_model (via list_models)
184+ # uses the newly selected server type.
185+ self .server_type = server_type
186+ self .config ["server_type" ] = server_type
187+
167188 # Get hostname
168189 monitor .setMessage ("Waiting for hostname" )
169190 try :
@@ -250,7 +271,6 @@ def reconfigure(self, monitor):
250271 self .save ()
251272 return True
252273
253-
254274 def change_model (self , monitor ):
255275 """Change the configured model and persist the change.
256276 Return true on success."""
@@ -308,7 +328,7 @@ def get_endpoint(self, endpoint):
308328def interact_with_ollama (model , system_prompt , prompt , c_code ):
309329 monitor .setMessage ("Model " + model + " is processing input..." )
310330 print ("\n >> Explanation:" )
311- url = CONFIG .get_endpoint ("/v1/chat/completions " )
331+ url = CONFIG .get_endpoint ("/api/generate " )
312332 data = {
313333 "model" : model ,
314334 "system" : system_prompt ,