8 changes: 7 additions & 1 deletion patchwork/common/client/llm/openai_.py
@@ -14,6 +14,7 @@
 from typing_extensions import Dict, Iterable, List, Optional, Union

 from patchwork.common.client.llm.protocol import NOT_GIVEN, LlmClient, NotGiven
+from patchwork.logger import logger


 @functools.lru_cache
@@ -87,7 +88,12 @@ def is_prompt_supported(

         model_limit = self.__get_model_limits(model)
         token_count = 0
-        encoding = tiktoken.encoding_for_model(model)
+        encoding = None
+        try:
+            encoding = tiktoken.encoding_for_model(model)
+        except Exception as e:
+            logger.error(f"Error getting encoding for model {model}: {e}, using gpt-4o as fallback")
+            encoding = tiktoken.encoding_for_model("gpt-4o")
         for message in messages:
             message_token_count = len(encoding.encode(message.get("content")))
             token_count = token_count + message_token_count
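
Taken out of the diff context, the new behavior is roughly: tiktoken raises for model names it does not recognise, and instead of letting that abort the token count, the client logs the error and falls back to the gpt-4o encoding. Below is a minimal standalone sketch of that pattern; the helper name get_encoding_with_fallback and the use of the standard logging module are illustrative stand-ins, not part of the patchwork codebase.

# A minimal sketch of the fallback added in the diff, assuming only that
# tiktoken.encoding_for_model raises for unknown model names (handled here,
# as in the diff, with a broad `except Exception`). The helper name
# `get_encoding_with_fallback` is hypothetical.
import logging

import tiktoken

logger = logging.getLogger(__name__)


def get_encoding_with_fallback(model: str) -> tiktoken.Encoding:
    try:
        # Happy path: tiktoken knows the model and returns its encoding.
        return tiktoken.encoding_for_model(model)
    except Exception as e:
        # Unknown or custom model names raise (typically KeyError); log the
        # failure and fall back to the gpt-4o encoding, as the diff does.
        logger.error(f"Error getting encoding for model {model}: {e}, using gpt-4o as fallback")
        return tiktoken.encoding_for_model("gpt-4o")


# Example: a custom model name no longer aborts the token count.
encoding = get_encoding_with_fallback("my-custom-finetune")
print(len(encoding.encode("hello world")))

The trade-off is that an unrecognised model is silently counted with the gpt-4o tokenizer, which may differ slightly from the model's real tokenizer, but the prompt-size check can still run instead of raising.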