Skip to content

Commit 2768c4c

Browse files
authored
Merge branch 'main' into feature/paint-order-filtering
2 parents 7794be9 + 0e4c3ef commit 2768c4c

File tree

1 file changed

+15
-14
lines changed

1 file changed

+15
-14
lines changed

browser_use/llm/openai/chat.py

Lines changed: 15 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
from collections.abc import Iterable, Mapping
2-
from dataclasses import dataclass
2+
from dataclasses import dataclass, field
33
from typing import Any, Literal, TypeVar, overload
44

55
import httpx
@@ -20,18 +20,6 @@
2020

2121
T = TypeVar('T', bound=BaseModel)
2222

23-
ReasoningModels: list[ChatModel | str] = [
24-
'o4-mini',
25-
'o3',
26-
'o3-mini',
27-
'o1',
28-
'o1-pro',
29-
'o3-pro',
30-
'gpt-5',
31-
'gpt-5-mini',
32-
'gpt-5-nano',
33-
]
34-
3523

3624
@dataclass
3725
class ChatOpenAI(BaseChatModel):
@@ -67,6 +55,19 @@ class ChatOpenAI(BaseChatModel):
6755
http_client: httpx.AsyncClient | None = None
6856
_strict_response_validation: bool = False
6957
max_completion_tokens: int | None = 4096
58+
reasoning_models: list[ChatModel | str] | None = field(
59+
default_factory=lambda: [
60+
'o4-mini',
61+
'o3',
62+
'o3-mini',
63+
'o1',
64+
'o1-pro',
65+
'o3-pro',
66+
'gpt-5',
67+
'gpt-5-mini',
68+
'gpt-5-nano',
69+
]
70+
)
7071

7172
# Static
7273
@property
@@ -180,7 +181,7 @@ async def ainvoke(
180181
if self.service_tier is not None:
181182
model_params['service_tier'] = self.service_tier
182183

183-
if any(str(m).lower() in str(self.model).lower() for m in ReasoningModels):
184+
if self.reasoning_models and any(str(m).lower() in str(self.model).lower() for m in self.reasoning_models):
184185
model_params['reasoning_effort'] = self.reasoning_effort
185186
del model_params['temperature']
186187
del model_params['frequency_penalty']

0 commit comments

Comments (0)