|
13 | 13 | from langchain_google_genai import ChatGoogleGenerativeAI
|
14 | 14 | from langchain_ollama import ChatOllama
|
15 | 15 | from langchain_openai import AzureChatOpenAI, ChatOpenAI
|
| 16 | +from langchain_ibm import ChatWatsonx |
16 | 17 |
|
17 | 18 | from .llm import DeepSeekR1ChatOpenAI, DeepSeekR1ChatOllama
|
18 | 19 |
|
|
24 | 25 | "google": "Google",
|
25 | 26 | "alibaba": "Alibaba",
|
26 | 27 | "moonshot": "MoonShot",
|
27 |
| - "unbound": "Unbound AI" |
| 28 | + "unbound": "Unbound AI", |
| 29 | + "ibm": "IBM" |
28 | 30 | }
|
29 | 31 |
|
30 | 32 |
|
@@ -154,6 +156,23 @@ def get_llm_model(provider: str, **kwargs):
|
154 | 156 | base_url=base_url,
|
155 | 157 | api_key=api_key,
|
156 | 158 | )
|
| 159 | + elif provider == "ibm": |
| 160 | + parameters = { |
| 161 | + "temperature": kwargs.get("temperature", 0.0), |
| 162 | + "max_tokens": kwargs.get("num_ctx", 32000) |
| 163 | + } |
| 164 | + if not kwargs.get("base_url", ""): |
| 165 | + base_url = os.getenv("IBM_ENDPOINT", "https://us-south.ml.cloud.ibm.com") |
| 166 | + else: |
| 167 | + base_url = kwargs.get("base_url") |
| 168 | + |
| 169 | + return ChatWatsonx( |
| 170 | + model_id=kwargs.get("model_name", "ibm/granite-vision-3.1-2b-preview"), |
| 171 | + url=base_url, |
| 172 | + project_id=os.getenv("IBM_PROJECT_ID"), |
| 173 | + apikey=os.getenv("IBM_API_KEY"), |
| 174 | + params=parameters |
| 175 | + ) |
157 | 176 | elif provider == "moonshot":
|
158 | 177 | return ChatOpenAI(
|
159 | 178 | model=kwargs.get("model_name", "moonshot-v1-32k-vision-preview"),
|
@@ -234,6 +253,7 @@ def get_llm_model(provider: str, **kwargs):
|
234 | 253 | "Pro/THUDM/chatglm3-6b",
|
235 | 254 | "Pro/THUDM/glm-4-9b-chat",
|
236 | 255 | ],
|
| 256 | +    "ibm": ["ibm/granite-vision-3.1-2b-preview", "meta-llama/llama-4-maverick-17b-128e-instruct-fp8", "meta-llama/llama-3-2-90b-vision-instruct"] |
237 | 257 | }
|
238 | 258 |
|
239 | 259 |
|
|
0 commit comments