1 file changed (+9, -3 lines)
libs/partners/openai/langchain_openai/chat_models

@@ -1447,8 +1447,10 @@ def _get_encoding_model(self) -> tuple[str, tiktoken.Encoding]:
             encoding = tiktoken.encoding_for_model(model)
         except KeyError:
             encoder = "cl100k_base"
-            if self.model_name.startswith("gpt-4o") or self.model_name.startswith(
-                "gpt-4.1"
+            if (
+                self.model_name.startswith("gpt-4o")
+                or self.model_name.startswith("gpt-4.1")
+                or self.model_name.startswith("gpt-5")
             ):
                 encoder = "o200k_base"
             encoding = tiktoken.get_encoding(encoder)
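
The hunk above extends the prefix fallback in _get_encoding_model so that gpt-5 model names also resolve to the o200k_base vocabulary when tiktoken raises KeyError for a name it does not recognise. A minimal standalone sketch of that fallback (not the LangChain method itself; pick_encoding and the sample model string are illustrative, and newer tiktoken releases may already resolve gpt-5 names directly, in which case the except branch is never reached):

# Minimal sketch of the prefix fallback shown above (illustrative helper,
# not the library code).
import tiktoken

def pick_encoding(model_name: str) -> tiktoken.Encoding:
    try:
        # Names tiktoken already knows resolve directly.
        return tiktoken.encoding_for_model(model_name)
    except KeyError:
        # Unknown names fall back by prefix: newer families use the
        # o200k_base vocabulary, everything else defaults to cl100k_base.
        encoder = "cl100k_base"
        if model_name.startswith(("gpt-4o", "gpt-4.1", "gpt-5")):
            encoder = "o200k_base"
        return tiktoken.get_encoding(encoder)

enc = pick_encoding("gpt-5-mini")
print(enc.name, len(enc.encode("hello world")))
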
@@ -1499,7 +1501,11 @@ def get_num_tokens_from_messages(
             tokens_per_message = 4
             # if there's a name, the role is omitted
             tokens_per_name = -1
-        elif model.startswith("gpt-3.5-turbo") or model.startswith("gpt-4"):
+        elif (
+            model.startswith("gpt-3.5-turbo")
+            or model.startswith("gpt-4")
+            or model.startswith("gpt-5")
+        ):
             tokens_per_message = 3
             tokens_per_name = 1
         else:
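
The second hunk applies the same prefix check inside get_num_tokens_from_messages, so gpt-5 models use the gpt-4-style framing constants (3 tokens per message, 1 per name) instead of falling through to the unsupported-model branch. A rough standalone sketch of how those constants feed an approximate prompt count, following OpenAI's published counting recipe; approx_num_tokens is an illustrative name and the result is an estimate, not the provider's billed count:

# Rough sketch (not the library method): estimate prompt tokens for a list of
# {"role": ..., "content": ...} dicts using the framing constants above.
import tiktoken

def approx_num_tokens(messages: list[dict], model: str = "gpt-5") -> int:
    if model.startswith("gpt-3.5-turbo-0301"):
        tokens_per_message = 4   # older format: if there's a name, the role is omitted
        tokens_per_name = -1
    elif model.startswith(("gpt-3.5-turbo", "gpt-4", "gpt-5")):
        tokens_per_message = 3   # every message carries ~3 framing tokens
        tokens_per_name = 1      # an explicit "name" field adds one token
    else:
        raise NotImplementedError(f"No token-count heuristic for {model!r}")
    try:
        enc = tiktoken.encoding_for_model(model)
    except KeyError:
        enc = tiktoken.get_encoding("o200k_base")
    num_tokens = 3  # every reply is primed with <|start|>assistant<|message|>
    for message in messages:
        num_tokens += tokens_per_message
        for key, value in message.items():
            num_tokens += len(enc.encode(value))
            if key == "name":
                num_tokens += tokens_per_name
    return num_tokens

print(approx_num_tokens([{"role": "user", "content": "Hello, world"}]))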