Skip to content

Commit 8fdceeb

Browse files
authored
πŸ› Throws an exception when there are no more tokens available #954
2 parents da32c0c + 062f5a7 commit 8fdceeb

File tree

2 files changed

+16
-0
lines changed

2 files changed

+16
-0
lines changed

sdk/nexent/core/models/openai_long_context_model.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -161,6 +161,12 @@ def prepare_long_text_message(self, text_content: str, system_prompt: str, user_
161161
# Reserve tokens for text content
162162
available_tokens = self.max_context_tokens - system_tokens - user_prompt_tokens - 100 # Reserve 100 tokens as buffer
163163

164+
# Check if there are sufficient tokens available
165+
if available_tokens <= 0:
166+
error_msg = f"Insufficient tokens available. Required: {system_tokens + user_prompt_tokens + 100}, Available: {self.max_context_tokens}, Shortage: {abs(available_tokens)}"
167+
logger.error(error_msg)
168+
raise ValueError(error_msg)
169+
164170
# Truncate the text content
165171
truncated_text = self.truncate_text(text_content, available_tokens)
166172
final_content_tokens = self.count_tokens(truncated_text)

test/sdk/core/models/test_openai_long_context_model.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -223,3 +223,13 @@ def test_truncation_strategies_comparison(long_context_model):
223223
assert start_result != end_result
224224
assert start_result != middle_result
225225
assert end_result != middle_result
226+
227+
228+
def test_prepare_long_text_message_insufficient_tokens(long_context_model):
    """ValueError should be raised when the prompts alone exceed the context window."""
    # Shrink the context window so it cannot cover the mocked prompt sizes below.
    long_context_model.max_context_tokens = 80000  # smaller than 50000 + 40000 + 100 buffer
    # Mocked token counts, consumed in call order: system prompt, user prompt, content.
    long_context_model.count_tokens = MagicMock(side_effect=[50000, 40000, 1000])

    with pytest.raises(ValueError, match="Insufficient tokens available"):
        long_context_model.prepare_long_text_message("content", "system prompt", "user prompt")

0 commit comments

Comments (0)