Skip to content
This repository was archived by the owner on Mar 19, 2026. It is now read-only.

Commit 9fe8e1b

Browse files
committed
Finesse temperature passing
1 parent 534c145 commit 9fe8e1b

File tree

1 file changed

+7
-1
lines changed

1 file changed

+7
-1
lines changed

src/controlflow/llm/models.py

Lines changed: 7 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -52,6 +52,8 @@ def get_model(
5252
"To use Google as an LLM provider, please install the `langchain_google_genai` package."
5353
)
5454
cls = ChatGoogleGenerativeAI
55+
if temperature is None:
56+
temperature = 0.7
5557
elif provider == "groq":
5658
try:
5759
from langchain_groq import ChatGroq
@@ -60,6 +62,8 @@ def get_model(
6062
"To use Groq as an LLM provider, please install the `langchain_groq` package."
6163
)
6264
cls = ChatGroq
65+
if temperature is None:
66+
temperature = 0.7
6367
elif provider == "ollama":
6468
try:
6569
from langchain_ollama import ChatOllama
@@ -73,7 +77,9 @@ def get_model(
7377
f"Could not load provider `{provider}` automatically. Please provide the LLM class manually."
7478
)
7579

76-
return cls(model=model, temperature=temperature, **kwargs)
80+
if temperature is not None:
81+
kwargs["temperature"] = temperature
82+
return cls(model=model, **kwargs)
7783

7884

7985
def _get_initial_default_model() -> BaseChatModel:

0 commit comments

Comments (0)