Commit 160f83f

Update utils.py
1 parent 4540348 commit 160f83f

File tree

1 file changed, 1 addition, 51 deletions


src/utils/utils.py

Lines changed: 1 addition & 51 deletions
@@ -13,10 +13,6 @@
 from langchain_ollama import ChatOllama
 from langchain_openai import AzureChatOpenAI, ChatOpenAI
 import gradio as gr
-from openai import OpenAI, AzureOpenAI
-from google.generativeai import configure, list_models
-from langchain_anthropic import AnthropicLLM
-from langchain_ollama.llms import OllamaLLM
 
 def get_llm_model(provider: str, **kwargs):
     """
@@ -137,56 +133,10 @@ def update_model_dropdown(llm_provider, api_key=None, base_url=None):
         return gr.Dropdown(choices=model_names[llm_provider], value=model_names[llm_provider][0], interactive=True)
     else:
         return gr.Dropdown(choices=[], value="", interactive=True, allow_custom_value=True)
-
-def fetch_available_models(llm_provider: str, api_key: str = None, base_url: str = None) -> list[str]:
-    """
-    Fetch available models for the selected LLM provider using API keys from .env by default.
-    """
-    try:
-        # Use API keys from .env if not provided
-        if not api_key:
-            api_key = os.getenv(f"{llm_provider.upper()}_API_KEY", "")
-        if not base_url:
-            base_url = os.getenv(f"{llm_provider.upper()}_BASE_URL", "")
-
-        if llm_provider == "anthropic":
-            client = AnthropicLLM(api_key=api_key)
-            return ["claude-3-5-sonnet-20240620", "claude-3-opus-20240229"]  # Example models
-
-        elif llm_provider == "openai":
-            client = OpenAI(api_key=api_key, base_url=base_url)
-            models = client.models.list()
-            return [model.id for model in models.data]
-
-        elif llm_provider == "deepseek":
-            return ["deepseek-chat"]  # Example model
-
-        elif llm_provider == "gemini":
-            configure(api_key=api_key)
-            models = list_models()
-            return [model.name for model in models]
-
-        elif llm_provider == "ollama":
-            client = OllamaLLM(model="default_model_name")  # Replace with the actual model name
-            models = client.models.list()
-            return [model.name for model in models]
-
-        elif llm_provider == "azure_openai":
-            client = AzureOpenAI(api_key=api_key, base_url=base_url)
-            models = client.models.list()
-            return [model.id for model in models.data]
-
-        else:
-            print(f"Unsupported LLM provider: {llm_provider}")
-            return []
-
-    except Exception as e:
-        print(f"Error fetching models from {llm_provider}: {e}")
-        return []
 
 def encode_image(img_path):
     if not img_path:
         return None
     with open(img_path, "rb") as fin:
         image_data = base64.b64encode(fin.read()).decode("utf-8")
-        return image_data
+    return image_data
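
The single added line in this commit is the final return image_data in encode_image. The rendered page does not preserve leading whitespace, so the exact indentation change cannot be read off it; the reconstruction above assumes the return moved from inside the with block to just after it. Under that assumption, encode_image reads as the sketch below after the commit; either placement returns the same base64 string, the post-with placement just makes it explicit that the file is closed before returning.

import base64

def encode_image(img_path):
    # Sketch of encode_image after this commit, based on the context lines above.
    # The placement of the final return (after the with block) is an assumption,
    # since the rendered diff drops indentation.
    if not img_path:
        return None
    with open(img_path, "rb") as fin:
        # Read the image bytes and encode them as a base64 string.
        image_data = base64.b64encode(fin.read()).decode("utf-8")
    return image_data

For example, a call like encode_image("screenshot.png") returns a base64-encoded string suitable for a data URL, while encode_image(None) returns None.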
