1010
1111import click
1212
13+ from ontobot_change_agent .constants import OWL_EXTENSION
14+
1315try :
1416 from llm_change_agent .cli import execute
17+ from llm_change_agent .utils .llm_utils import (
18+ extract_commands ,
19+ get_anthropic_models ,
20+ get_lbl_cborg_models ,
21+ get_ollama_models ,
22+ get_openai_models ,
23+ )
24+
25+ from ontobot_change_agent .constants import (
26+ ANTHROPIC_PROVIDER ,
27+ CBORG_PROVIDER ,
28+ OLLAMA_PROVIDER ,
29+ OPENAI_PROVIDER ,
30+ )
1531
1632 llm_change_agent_available = True
33+ ALL_AVAILABLE_PROVIDERS = [OPENAI_PROVIDER , OLLAMA_PROVIDER , ANTHROPIC_PROVIDER , CBORG_PROVIDER ]
34+ ALL_AVAILABLE_MODELS = (
35+ get_openai_models () + get_ollama_models () + get_anthropic_models () + get_lbl_cborg_models ()
36+ )
1737except ImportError :
1838 # Handle the case where the package is not installed
1939 llm_change_agent_available = False
40+ ALL_AVAILABLE_PROVIDERS = []
41+ ALL_AVAILABLE_MODELS = []
2042
2143
2244from ontobot_change_agent import __version__
2749 get_ontobot_implementers ,
2850 process_issue_via_jar ,
2951 process_issue_via_oak ,
30- process_new_term_template ,
3152)
32- from ontobot_change_agent .constants import NEW_TERM_LABEL , OWL_EXTENSION
3353
3454__all__ = [
3555 "main" ,
@@ -123,6 +143,12 @@ def main(verbose: int, quiet: bool):
123143 default = False ,
124144 help = "Use llm-change-agent for processing." ,
125145)
146+ llm_provider_option = click .option (
147+ "--provider" , type = click .Choice (ALL_AVAILABLE_PROVIDERS ), help = "Provider to use for generation."
148+ )
149+ llm_model_option = click .option (
150+ "--model" , type = click .Choice (ALL_AVAILABLE_MODELS ), help = "Model to use for generation."
151+ )
126152
127153
128154@main .command ()
@@ -188,6 +214,8 @@ def get_labels(repo: str, token: str):
188214@jar_path_option
189215@output_option
190216@use_llm_option
217+ @llm_provider_option
218+ @llm_model_option
191219def process_issue (
192220 input : str ,
193221 repo : str ,
@@ -200,6 +228,8 @@ def process_issue(
200228 jar_path : str ,
201229 output : str ,
202230 use_llm : bool = False ,
231+ provider : str = None ,
232+ model : str = None ,
203233):
204234 """Run processes based on issue label.
205235
@@ -240,20 +270,7 @@ def process_issue(
240270 KGCL_COMMANDS = []
241271 formatted_body = ""
242272
243- if NEW_TERM_LABEL in issue ["labels" ]:
244- click .echo ("New term label found. Processing new term template..." )
245- formatted_body = "The following input was provided: </br> "
246- KGCL_COMMANDS , body_as_dict , reason = process_new_term_template (
247- issue ["body" ], prefix
248- )
249- if reason is None :
250- click .echo ("No reason found to skip. Converting body to markdown..." )
251- formatted_body += _convert_to_markdown (body_as_dict )
252- formatted_body += "</br> The following commands were executed: </br> "
253- else :
254- click .echo (f"{ issue [TITLE ]} does not need ontobot's attention since { reason } " )
255- break
256- elif ontobot_pattern .match (issue [BODY ].lower ()):
273+ if ontobot_pattern .match (issue [BODY ].lower ()):
257274 click .echo ("Ontobot apply command found. Extracting KGCL commands..." )
258275 formatted_body = "The following commands were executed: </br> "
259276 KGCL_COMMANDS = _get_kgcl_commands (issue [BODY ])
@@ -262,9 +279,9 @@ def process_issue(
262279 click .echo (f"Summoning llm-change-agent for { issue [TITLE ]} " )
263280 with click .Context (execute ) as ctx :
264281 ctx .params ["prompt" ] = issue [BODY ]
265- ctx .params ["provider" ] = "cborg"
266- ctx .params ["model" ] = "google/gemini:latest"
267- response = execute .invoke (ctx )
282+ ctx .params ["provider" ] = provider
283+ ctx .params ["model" ] = model
284+ response = extract_commands ( execute .invoke (ctx ) )
268285 KGCL_COMMANDS = [
269286 command .replace ('"' , "'" ) for command in ast .literal_eval (response )
270287 ]