Skip to content
This repository was archived by the owner on Mar 19, 2026. It is now read-only.

Commit daf0883

Browse files
authored
Merge pull request #389 from namin/ollama-autoconf
Support provider Ollama for automatic configuration.
2 parents bcf422e + 6cab490 commit daf0883

File tree

4 files changed

+15
-0
lines changed

4 files changed

+15
-0
lines changed

docs/guides/configure-llms.mdx

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -47,6 +47,7 @@ At this time, supported providers for automatic configuration include:
4747
| Anthropic | `anthropic` | (included) |
4848
| Google | `google` | `langchain_google_genai` |
4949
| Groq | `groq` | `langchain_groq` |
50+
| Ollama | `ollama` | `langchain-ollama` |
5051

5152
If the required dependencies are not installed, ControlFlow will be unable to load the model and will raise an error.
5253

pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -51,6 +51,7 @@ tests = [
5151
"langchain_community",
5252
"langchain_google_genai",
5353
"langchain_groq",
54+
"langchain-ollama",
5455
"pytest-asyncio>=0.18.2,!=0.22.0,<0.23.0",
5556
"pytest-env>=0.8,<2.0",
5657
"pytest-rerunfailures>=10,<14",

src/controlflow/llm/models.py

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -60,6 +60,14 @@ def get_model(
6060
"To use Groq as an LLM provider, please install the `langchain_groq` package."
6161
)
6262
cls = ChatGroq
63+
elif provider == "ollama":
64+
try:
65+
from langchain_ollama import ChatOllama
66+
except ImportError:
67+
raise ImportError(
68+
"To use Ollama as an LLM provider, please install the `langchain-ollama` package."
69+
)
70+
cls = ChatOllama
6371
else:
6472
raise ValueError(
6573
f"Could not load provider `{provider}` automatically. Please provide the LLM class manually."

tests/llm/test_models.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
from langchain_anthropic import ChatAnthropic
33
from langchain_google_genai import ChatGoogleGenerativeAI
44
from langchain_groq import ChatGroq
5+
from langchain_ollama import ChatOllama
56
from langchain_openai import AzureChatOpenAI, ChatOpenAI
67

78
from controlflow.llm.models import get_model
@@ -45,6 +46,10 @@ def test_get_groq_model(monkeypatch):
4546
assert isinstance(model, ChatGroq)
4647
assert model.model_name == "mixtral-8x7b-32768"
4748

49+
def test_get_ollama_model(monkeypatch):
50+
model = get_model("ollama/qwen2.5")
51+
assert isinstance(model, ChatOllama)
52+
assert model.model == "qwen2.5"
4853

4954
def test_get_model_with_invalid_format():
5055
with pytest.raises(ValueError, match="The model `gpt-4o` is not valid."):

0 commit comments

Comments
 (0)