from langchain_ollama import ChatOllama
from langchain_openai import AzureChatOpenAI, ChatOpenAI
import gradio as gr
- from openai import OpenAI, AzureOpenAI
- from google.generativeai import configure, list_models
- from langchain_anthropic import AnthropicLLM
- from langchain_ollama.llms import OllamaLLM


def get_llm_model(provider: str, **kwargs):
    """
@@ -137,56 +133,10 @@ def update_model_dropdown(llm_provider, api_key=None, base_url=None):
        return gr.Dropdown(choices=model_names[llm_provider], value=model_names[llm_provider][0], interactive=True)
    else:
        return gr.Dropdown(choices=[], value="", interactive=True, allow_custom_value=True)
-
- def fetch_available_models(llm_provider: str, api_key: str = None, base_url: str = None) -> list[str]:
-     """
-     Fetch available models for the selected LLM provider using API keys from .env by default.
-     """
-     try:
-         # Use API keys from .env if not provided
-         if not api_key:
-             api_key = os.getenv(f"{llm_provider.upper()}_API_KEY", "")
-         if not base_url:
-             base_url = os.getenv(f"{llm_provider.upper()}_BASE_URL", "")
-
-         if llm_provider == "anthropic":
-             client = AnthropicLLM(api_key=api_key)
-             return ["claude-3-5-sonnet-20240620", "claude-3-opus-20240229"]  # Example models
-
-         elif llm_provider == "openai":
-             client = OpenAI(api_key=api_key, base_url=base_url)
-             models = client.models.list()
-             return [model.id for model in models.data]
-
-         elif llm_provider == "deepseek":
-             return ["deepseek-chat"]  # Example model
-
-         elif llm_provider == "gemini":
-             configure(api_key=api_key)
-             models = list_models()
-             return [model.name for model in models]
-
-         elif llm_provider == "ollama":
-             client = OllamaLLM(model="default_model_name")  # Replace with the actual model name
-             models = client.models.list()
-             return [model.name for model in models]
-
-         elif llm_provider == "azure_openai":
-             client = AzureOpenAI(api_key=api_key, base_url=base_url)
-             models = client.models.list()
-             return [model.id for model in models.data]
-
-         else:
-             print(f"Unsupported LLM provider: {llm_provider}")
-             return []
-
-     except Exception as e:
-         print(f"Error fetching models from {llm_provider}: {e}")
-         return []

def encode_image(img_path):
    if not img_path:
        return None
    with open(img_path, "rb") as fin:
        image_data = base64.b64encode(fin.read()).decode("utf-8")
-     return image_data
+     return image_data
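For context, encode_image returns a bare base64 string. A short, hypothetical usage sketch; the data-URL wrapping and message dict below are illustrative assumptions, not part of this diff:

# Hypothetical caller: wrap the base64 payload as a data URL for a
# multimodal chat message (file name and message shape are assumed).
encoded = encode_image("screenshot.png")
if encoded:
    image_message = {
        "type": "image_url",
        "image_url": {"url": f"data:image/png;base64,{encoded}"},
    }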