Commit 069808f

Update lm.py
1 parent 4ef3841 commit 069808f

File tree

1 file changed (+3, -4 lines)

dspy/clients/lm.py

Lines changed: 3 additions & 4 deletions
@@ -21,9 +21,7 @@ def __init__(self, model, model_type="chat", temperature=0.0, max_tokens=1000, c
         self.model = model
         self.model_type = model_type
         self.cache = cache
-        self.temperature = temperature
-        self.max_tokens = max_tokens
-        self.kwargs = kwargs
+        self.kwargs = dict(temperature=temperature, max_tokens=max_tokens, **kwargs)
         self.history = []

         if "o1-" in model:
@@ -35,7 +33,7 @@ def __call__(self, prompt=None, messages=None, **kwargs):
         # Build the request.
         cache = kwargs.pop("cache", self.cache)
         messages = messages or [{"role": "user", "content": prompt}]
-        kwargs = {"temperature": self.temperature, "max_tokens": self.max_tokens, **self.kwargs, **kwargs}
+        kwargs = {**self.kwargs, **kwargs}

         # Make the request and handle LRU & disk caching.
         if self.model_type == "chat":
@@ -59,6 +57,7 @@ def __call__(self, prompt=None, messages=None, **kwargs):
             model_type=self.model_type,
         )
         self.history.append(entry)
+
         return outputs

     def inspect_history(self, n: int = 1):
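
Net effect of the change: the sampling defaults (temperature, max_tokens) are folded into self.kwargs once at construction time, so __call__ only performs a single dict merge in which per-call overrides win. Below is a minimal sketch of that merge behavior; FakeLM is a hypothetical stand-in rather than the real dspy.LM client, and the model string is arbitrary.

# Minimal sketch of the kwargs consolidation in this commit. FakeLM is a
# hypothetical stand-in for illustration; the real class lives in
# dspy/clients/lm.py, and the model string below is arbitrary.
class FakeLM:
    def __init__(self, model, temperature=0.0, max_tokens=1000, **kwargs):
        self.model = model
        # Defaults are folded into self.kwargs once, instead of being kept
        # as separate self.temperature / self.max_tokens attributes.
        self.kwargs = dict(temperature=temperature, max_tokens=max_tokens, **kwargs)

    def __call__(self, prompt=None, **kwargs):
        # Per-call kwargs override the stored defaults in a single merge.
        return {**self.kwargs, **kwargs}

lm = FakeLM("openai/gpt-4o-mini", temperature=0.2)
print(lm("hi"))                 # {'temperature': 0.2, 'max_tokens': 1000}
print(lm("hi", max_tokens=50))  # {'temperature': 0.2, 'max_tokens': 50}

One practical upshot is that any future default request parameter can be added to the constructor and flow through the same merge, without touching __call__.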
