Skip to content

Commit d24c208

Browse files
authored
feat: gpt-5-chat support (#20774)
* added gpt-5-chat support * uv lock * updated * updated tests * updated context window
1 parent c730192 commit d24c208

File tree

4 files changed

+27
-2
lines changed

4 files changed

+27
-2
lines changed

llama-index-integrations/llms/llama-index-llms-openai/llama_index/llms/openai/utils.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -59,6 +59,7 @@
5959
# gpt-5 is a reasoning model, putting it in the o models list
6060
"gpt-5": 400000,
6161
"gpt-5-2025-08-07": 400000,
62+
"gpt-5-chat": 128000,
6263
"gpt-5-mini": 400000,
6364
"gpt-5-mini-2025-08-07": 400000,
6465
"gpt-5-nano": 400000,

llama-index-integrations/llms/llama-index-llms-openai/pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ dev = [
2727

2828
[project]
2929
name = "llama-index-llms-openai"
30-
version = "0.6.19"
30+
version = "0.6.20"
3131
description = "llama-index llms openai integration"
3232
authors = [{name = "llama-index"}]
3333
requires-python = ">=3.9,<4.0"

llama-index-integrations/llms/llama-index-llms-openai/tests/test_openai_utils.py

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -477,3 +477,27 @@ def test_gpt_5_chat_latest_model_support() -> None:
477477
def test_is_chatcomp_api_supported() -> None:
478478
assert is_chatcomp_api_supported("gpt-5.2")
479479
assert not is_chatcomp_api_supported("gpt-5.2-pro")
480+
481+
482+
def test_gpt_5_chat_model_support() -> None:
483+
"""Test that gpt-5-chat is properly supported."""
484+
model_name = "gpt-5-chat"
485+
486+
assert model_name in ALL_AVAILABLE_MODELS, (
487+
f"{model_name} should be in ALL_AVAILABLE_MODELS"
488+
)
489+
490+
assert is_chat_model(model_name) is True, (
491+
f"{model_name} should be recognized as a chat model"
492+
)
493+
494+
assert is_function_calling_model(model_name) is True, (
495+
f"{model_name} should support function calling"
496+
)
497+
498+
context_size = openai_modelname_to_contextsize(model_name)
499+
assert context_size == 128000, (
500+
f"{model_name} should have 128000 tokens context, got {context_size}"
501+
)
502+
503+
assert model_name in CHAT_MODELS, f"{model_name} should be in CHAT_MODELS"

llama-index-integrations/llms/llama-index-llms-openai/uv.lock

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments (0)