
Commit 4a3a7ac

fix: adjust reasoning effort support for o3/o4 models
- Keep supportsReasoningEffort only for the base o3, o4-mini, and o3-mini models
- Remove supportsReasoningEffort from the -high and -low variants
- Position supportsReasoningEffort right before the reasoningEffort property
1 parent 77b3be7 · commit 4a3a7ac
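
For orientation, a minimal sketch of the shape the base o3 entry takes after this change. Values are copied from the diff below; the "o3" key name is inferred from the commit message, and fields outside the shown hunk are omitted.

// Sketch only (not the full entry): base o3 record after this commit, with
// supportsReasoningEffort placed immediately before reasoningEffort.
o3: {
	// ...maxTokens and any other leading fields sit outside the shown hunk...
	contextWindow: 200_000,
	supportsImages: true,
	supportsPromptCache: true,
	inputPrice: 2.0,
	outputPrice: 8.0,
	cacheReadsPrice: 0.5,
	supportsReasoningEffort: true,
	reasoningEffort: "medium",
},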

File tree: 1 file changed (+3 −9 lines)


packages/types/src/providers/openai.ts

Lines changed: 3 additions & 9 deletions
@@ -78,18 +78,17 @@ export const openAiNativeModels = {
 	contextWindow: 200_000,
 	supportsImages: true,
 	supportsPromptCache: true,
-	supportsReasoningEffort: true,
 	inputPrice: 2.0,
 	outputPrice: 8.0,
 	cacheReadsPrice: 0.5,
+	supportsReasoningEffort: true,
 	reasoningEffort: "medium",
 },
 "o3-high": {
 	maxTokens: 100_000,
 	contextWindow: 200_000,
 	supportsImages: true,
 	supportsPromptCache: true,
-	supportsReasoningEffort: true,
 	inputPrice: 2.0,
 	outputPrice: 8.0,
 	cacheReadsPrice: 0.5,
@@ -100,7 +99,6 @@ export const openAiNativeModels = {
 	contextWindow: 200_000,
 	supportsImages: true,
 	supportsPromptCache: true,
-	supportsReasoningEffort: true,
 	inputPrice: 2.0,
 	outputPrice: 8.0,
 	cacheReadsPrice: 0.5,
@@ -111,18 +109,17 @@ export const openAiNativeModels = {
 	contextWindow: 200_000,
 	supportsImages: true,
 	supportsPromptCache: true,
-	supportsReasoningEffort: true,
 	inputPrice: 1.1,
 	outputPrice: 4.4,
 	cacheReadsPrice: 0.275,
+	supportsReasoningEffort: true,
 	reasoningEffort: "medium",
 },
 "o4-mini-high": {
 	maxTokens: 100_000,
 	contextWindow: 200_000,
 	supportsImages: true,
 	supportsPromptCache: true,
-	supportsReasoningEffort: true,
 	inputPrice: 1.1,
 	outputPrice: 4.4,
 	cacheReadsPrice: 0.275,
@@ -133,7 +130,6 @@ export const openAiNativeModels = {
 	contextWindow: 200_000,
 	supportsImages: true,
 	supportsPromptCache: true,
-	supportsReasoningEffort: true,
 	inputPrice: 1.1,
 	outputPrice: 4.4,
 	cacheReadsPrice: 0.275,
@@ -144,18 +140,17 @@ export const openAiNativeModels = {
 	contextWindow: 200_000,
 	supportsImages: false,
 	supportsPromptCache: true,
-	supportsReasoningEffort: true,
 	inputPrice: 1.1,
 	outputPrice: 4.4,
 	cacheReadsPrice: 0.55,
+	supportsReasoningEffort: true,
 	reasoningEffort: "medium",
 },
 "o3-mini-high": {
 	maxTokens: 100_000,
 	contextWindow: 200_000,
 	supportsImages: false,
 	supportsPromptCache: true,
-	supportsReasoningEffort: true,
 	inputPrice: 1.1,
 	outputPrice: 4.4,
 	cacheReadsPrice: 0.55,
@@ -166,7 +161,6 @@ export const openAiNativeModels = {
 	contextWindow: 200_000,
 	supportsImages: false,
 	supportsPromptCache: true,
-	supportsReasoningEffort: true,
 	inputPrice: 1.1,
 	outputPrice: 4.4,
 	cacheReadsPrice: 0.55,
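
Why the flag placement matters downstream: a hedged sketch, assuming a simplified model-info shape, of how a consumer might decide whether to honor a user-selected reasoning effort. The resolveReasoningEffort helper, the ModelInfoSketch interface, and their parameter names are hypothetical and not part of this commit.

// Hypothetical consumer logic (not from this repository): models that
// advertise supportsReasoningEffort expose a user-selectable effort level,
// while the fixed -high/-low variants ignore any user preference.
type ReasoningEffort = "low" | "medium" | "high"

// Simplified stand-in for the real model-info type; an assumption.
interface ModelInfoSketch {
	supportsReasoningEffort?: boolean
	reasoningEffort?: ReasoningEffort
}

function resolveReasoningEffort(
	model: ModelInfoSketch,
	userChoice?: ReasoningEffort,
): ReasoningEffort | undefined {
	if (model.supportsReasoningEffort) {
		// Configurable: prefer the user's choice, fall back to the default
		// (e.g. "medium" for the base o3 / o4-mini / o3-mini entries).
		return userChoice ?? model.reasoningEffort
	}
	// Fixed variants: use whatever effort the entry hard-codes, if any.
	return model.reasoningEffort
}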

0 commit comments