Skip to content

Commit 7557524

Browse files
committed
Get context window and max tokens from litellm if not set
1 parent 722a58f commit 7557524

File tree

1 file changed

+14
-0
lines changed

1 file changed

+14
-0
lines changed

interpreter/core/llm/llm.py

Lines changed: 14 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -66,6 +66,9 @@ def run(self, messages):
6666
And then processing its output, whether it's a function or non function calling model, into LMC format.
6767
"""
6868

69+
if not self._is_loaded:
70+
self.load()
71+
6972
if (
7073
self.max_tokens is not None
7174
and self.context_window is not None
@@ -357,6 +360,17 @@ def load(self):
357360

358361
# Validate LLM should be moved here!!
359362

363+
if self.context_window == None:
364+
try:
365+
model_info = litellm.get_model_info(model=self.model)
366+
self.context_window = model_info["max_input_tokens"]
367+
if self.max_tokens == None:
368+
self.max_tokens = min(
369+
int(self.context_window * 0.2), model_info["max_output_tokens"]
370+
)
371+
except:
372+
pass
373+
360374
self._is_loaded = True
361375

362376

0 commit comments

Comments (0)