Skip to content

Commit cefaebf

Browse files
fix(sdk): add gpt-5.2-codex, gpt-5.3-codex, and gpt-5.2 to model-variant detection (#2238)
Co-authored-by: openhands <openhands@all-hands.dev>
1 parent 345a675 commit cefaebf

File tree

2 files changed

+65
-2
lines changed

2 files changed

+65
-2
lines changed

openhands-sdk/openhands/sdk/llm/utils/model_prompt_spec.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -36,8 +36,11 @@ class ModelPromptSpec(BaseModel):
3636
# Ordered heuristics to pick the most specific variant available for a family.
# Maps a model family key to an ordered tuple of (variant, match-strings) pairs.
# Order matters: the "-codex" entry precedes the plain "gpt-5" entry so that
# codex model names are not swallowed by the broader "gpt-5" match —
# presumably entries are tried first-to-last; verify against the lookup code.
_MODEL_VARIANT_PATTERNS: dict[str, tuple[tuple[str, tuple[str, ...]], ...]] = {
    "openai_gpt": (
        (
            # Codex variant: matched by any of the codex version strings.
            "gpt-5-codex",
            ("gpt-5-codex", "gpt-5.1-codex", "gpt-5.2-codex", "gpt-5.3-codex"),
        ),
        # Non-codex GPT-5 family versions.
        ("gpt-5", ("gpt-5", "gpt-5.1", "gpt-5.2")),
    ),
}
4346

Lines changed: 60 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,60 @@
1+
"""Tests for model prompt spec utilities."""
2+
3+
import pytest
4+
5+
from openhands.sdk.llm.utils.model_prompt_spec import (
6+
get_model_prompt_spec,
7+
)
8+
9+
10+
@pytest.mark.parametrize(
    ("model_name", "canonical_name", "expected_variant"),
    [
        # Base (non-codex) versions all resolve to the "gpt-5" variant.
        ("gpt-5", None, "gpt-5"),
        ("gpt-5.1", None, "gpt-5"),
        ("gpt-5.2", None, "gpt-5"),
        # Codex versions all resolve to the "gpt-5-codex" variant.
        ("gpt-5-codex", None, "gpt-5-codex"),
        ("gpt-5.1-codex", None, "gpt-5-codex"),
        ("gpt-5.2-codex", None, "gpt-5-codex"),
        ("gpt-5.3-codex", None, "gpt-5-codex"),
        # Detection also works when a canonical name is supplied.
        ("gpt-5.2-codex", "openai/gpt-5.2-codex", "gpt-5-codex"),
        ("gpt-5.3-codex", "openai/gpt-5.3-codex", "gpt-5-codex"),
        # ...and with provider prefixes and suffixed model names.
        ("openai/gpt-5.2-codex-mini", None, "gpt-5-codex"),
        ("openai/gpt-5.3-codex-pro", None, "gpt-5-codex"),
    ],
)
def test_gpt5_variant_detection(
    model_name: str,
    canonical_name: str | None,
    expected_variant: str,
) -> None:
    """Each GPT-5 model string resolves to the expected prompt-spec variant."""
    spec = get_model_prompt_spec(model_name, canonical_name)
    assert spec.family == "openai_gpt"
    assert spec.variant == expected_variant
39+
40+
41+
@pytest.mark.parametrize(
    ("model_name", "canonical_name", "expected_family"),
    [
        ("claude-3-5-sonnet-20241022", None, "anthropic_claude"),
        ("gemini-2.0-flash", None, "google_gemini"),
        ("llama-3.1-70b-instruct", None, "meta_llama"),
        ("mistral-large-2411", None, "mistral"),
        ("deepseek-chat", None, "deepseek"),
        ("qwen-2.5-72b-instruct", None, "alibaba_qwen"),
    ],
)
def test_other_families(
    model_name: str,
    canonical_name: str | None,
    expected_family: str,
) -> None:
    """Non-GPT models resolve to their own family with no variant selected."""
    spec = get_model_prompt_spec(model_name, canonical_name)
    assert spec.variant is None
    assert spec.family == expected_family

0 commit comments

Comments
 (0)