1 file changed: 15 additions, 14 deletions
@@ -1,5 +1,5 @@
 from collections.abc import Iterable, Mapping
-from dataclasses import dataclass
+from dataclasses import dataclass, field
 from typing import Any, Literal, TypeVar, overload

 import httpx
@@ -20,18 +20,6 @@

 T = TypeVar('T', bound=BaseModel)

-ReasoningModels: list[ChatModel | str] = [
-    'o4-mini',
-    'o3',
-    'o3-mini',
-    'o1',
-    'o1-pro',
-    'o3-pro',
-    'gpt-5',
-    'gpt-5-mini',
-    'gpt-5-nano',
-]
-

 @dataclass
 class ChatOpenAI(BaseChatModel):
@@ -67,6 +55,19 @@ class ChatOpenAI(BaseChatModel):
     http_client: httpx.AsyncClient | None = None
     _strict_response_validation: bool = False
     max_completion_tokens: int | None = 4096
+    reasoning_models: list[ChatModel | str] | None = field(
+        default_factory=lambda: [
+            'o4-mini',
+            'o3',
+            'o3-mini',
+            'o1',
+            'o1-pro',
+            'o3-pro',
+            'gpt-5',
+            'gpt-5-mini',
+            'gpt-5-nano',
+        ]
+    )

     # Static
     @property
@@ -180,7 +181,7 @@ async def ainvoke(
         if self.service_tier is not None:
             model_params['service_tier'] = self.service_tier

-        if any(str(m).lower() in str(self.model).lower() for m in ReasoningModels):
+        if self.reasoning_models and any(str(m).lower() in str(self.model).lower() for m in self.reasoning_models):
             model_params['reasoning_effort'] = self.reasoning_effort
             del model_params['temperature']
             del model_params['frequency_penalty']
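
A minimal usage sketch of the new per-instance reasoning_models field, not part of the diff itself. The import path and the model / reasoning_effort constructor fields are assumptions inferred from the self.* references above, not from the visible field list.

# Hypothetical usage sketch; the module path below is an assumption.
# from some_package.openai_chat import ChatOpenAI

# Override the default reasoning-model list for one instance.
# 'model' and 'reasoning_effort' are assumed constructor fields, inferred
# from the self.model / self.reasoning_effort references in ainvoke().
llm = ChatOpenAI(
    model='gpt-5-mini',
    reasoning_effort='medium',
    reasoning_models=['gpt-5-mini'],
)

# Because the new guard is `if self.reasoning_models and any(...)`, passing
# None (or an empty list) skips the reasoning branch, so temperature and
# frequency_penalty stay in model_params.
plain = ChatOpenAI(model='gpt-4o', reasoning_models=None)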