Commit 5036bd7

fix(openai): don't crash get_num_tokens_from_messages on gpt-5 (#32451)
1 parent ec2b34a commit 5036bd7
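
The commit extends two model-name prefix checks so that "gpt-5" is handled like the other o200k_base models instead of falling through to the unsupported-model path in get_num_tokens_from_messages. Below is a minimal sketch of the prefix-based fallback being extended, not the library code itself; it assumes tiktoken is installed locally and does not yet map the model name "gpt-5" to an encoding:

    import tiktoken

    model_name = "gpt-5"
    try:
        # tiktoken raises KeyError for model names it cannot map to an encoding
        encoding = tiktoken.encoding_for_model(model_name)
    except KeyError:
        # Newer gpt-4o / gpt-4.1 / gpt-5 style models use o200k_base; anything
        # else falls back to cl100k_base, mirroring the logic in base.py.
        encoder = "cl100k_base"
        if model_name.startswith(("gpt-4o", "gpt-4.1", "gpt-5")):
            encoder = "o200k_base"
        encoding = tiktoken.get_encoding(encoder)

    print(len(encoding.encode("hello world")))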

File tree

  • libs/partners/openai/langchain_openai/chat_models/base.py

1 file changed: +9 -3 lines changed


libs/partners/openai/langchain_openai/chat_models/base.py

Lines changed: 9 additions & 3 deletions
@@ -1447,8 +1447,10 @@ def _get_encoding_model(self) -> tuple[str, tiktoken.Encoding]:
             encoding = tiktoken.encoding_for_model(model)
         except KeyError:
             encoder = "cl100k_base"
-            if self.model_name.startswith("gpt-4o") or self.model_name.startswith(
-                "gpt-4.1"
+            if (
+                self.model_name.startswith("gpt-4o")
+                or self.model_name.startswith("gpt-4.1")
+                or self.model_name.startswith("gpt-5")
             ):
                 encoder = "o200k_base"
             encoding = tiktoken.get_encoding(encoder)
@@ -1499,7 +1501,11 @@ def get_num_tokens_from_messages(
             tokens_per_message = 4
             # if there's a name, the role is omitted
             tokens_per_name = -1
-        elif model.startswith("gpt-3.5-turbo") or model.startswith("gpt-4"):
+        elif (
+            model.startswith("gpt-3.5-turbo")
+            or model.startswith("gpt-4")
+            or model.startswith("gpt-5")
+        ):
             tokens_per_message = 3
             tokens_per_name = 1
         else:
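
With both branches updated, counting tokens for a "gpt-5" model name no longer ends up in the final else branch that previously caused the crash. A minimal usage sketch, assuming langchain-openai with this fix installed; the API key is a placeholder, since counting plain-text messages is done locally via tiktoken and no request is sent:

    from langchain_core.messages import HumanMessage
    from langchain_openai import ChatOpenAI

    # Placeholder key: get_num_tokens_from_messages on text-only messages
    # tokenizes locally with tiktoken rather than calling the API.
    llm = ChatOpenAI(model="gpt-5", api_key="sk-placeholder")

    messages = [HumanMessage(content="How many tokens is this message?")]

    # Before this commit a "gpt-5" model name crashed here; with the fix it is
    # counted like gpt-4 (tokens_per_message = 3, tokens_per_name = 1) using
    # the o200k_base encoding.
    print(llm.get_num_tokens_from_messages(messages))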

0 commit comments
