
Commit 0f3ce3c

eamodio authored and d13 committed
Updates OpenAI model selection
1 parent 196ae23 commit 0f3ce3c

File tree: 1 file changed (+72 -37 lines)


src/plus/ai/models/model.ts

Lines changed: 72 additions & 37 deletions
@@ -60,80 +60,97 @@ export interface AIProviderDescriptorWithType<T extends AIProviders = AIProviders>
 
 export const openAIModels = <T extends OpenAIProviders>(provider: AIProviderDescriptor<T>): AIModel<T>[] => [
 	{
-		id: 'gpt-4.1',
-		name: 'GPT-4.1',
-		maxTokens: { input: 1047576, output: 32768 },
+		id: 'gpt-5',
+		name: 'GPT-5',
+		maxTokens: { input: 400000, output: 128000 },
 		provider: provider,
 	},
 	{
-		id: 'gpt-4.1-2025-04-14',
-		name: 'GPT-4.1 (2025-04-14)',
-		maxTokens: { input: 1047576, output: 32768 },
+		id: 'gpt-5-2025-08-07',
+		name: 'GPT-5',
+		maxTokens: { input: 400000, output: 128000 },
 		provider: provider,
 		hidden: true,
 	},
 	{
-		id: 'gpt-4.1-mini',
-		name: 'GPT-4.1 mini',
-		maxTokens: { input: 1047576, output: 32768 },
+		id: 'gpt-5-mini',
+		name: 'GPT-5 mini',
+		maxTokens: { input: 400000, output: 128000 },
 		provider: provider,
+		default: true,
 	},
 	{
-		id: 'gpt-4.1-mini-2025-04-14',
-		name: 'GPT-4.1 mini (2025-04-14)',
-		maxTokens: { input: 1047576, output: 32768 },
+		id: 'gpt-5-mini-2025-08-07',
+		name: 'GPT-5 mini',
+		maxTokens: { input: 400000, output: 128000 },
 		provider: provider,
 		hidden: true,
 	},
 	{
-		id: 'gpt-4.1-nano',
-		name: 'GPT-4.1 nano',
-		maxTokens: { input: 1047576, output: 32768 },
+		id: 'gpt-5-nano',
+		name: 'GPT-5 nano',
+		maxTokens: { input: 400000, output: 128000 },
 		provider: provider,
 	},
 	{
-		id: 'gpt-4.1-nano-2025-04-14',
-		name: 'GPT-4.1 nano (2025-04-14)',
-		maxTokens: { input: 1047576, output: 32768 },
+		id: 'gpt-5-nano-2025-08-07',
+		name: 'GPT-5 nano',
+		maxTokens: { input: 400000, output: 128000 },
 		provider: provider,
 		hidden: true,
 	},
 	{
-		id: 'gpt-5',
-		name: 'GPT-5',
-		maxTokens: { input: 400000, output: 128000 },
+		id: 'gpt-5-pro',
+		name: 'GPT-5 Pro',
+		maxTokens: { input: 400000, output: 272000 },
 		provider: provider,
+		hidden: true,
 	},
 	{
-		id: 'gpt-5-2025-08-07',
-		name: 'GPT-5',
-		maxTokens: { input: 400000, output: 128000 },
+		id: 'gpt-5-pro-2025-10-06',
+		name: 'GPT-5 Pro (2025-10-06)',
+		maxTokens: { input: 400000, output: 272000 },
 		provider: provider,
 		hidden: true,
 	},
 	{
-		id: 'gpt-5-mini',
-		name: 'GPT-5 mini',
-		maxTokens: { input: 400000, output: 128000 },
+		id: 'gpt-4.1',
+		name: 'GPT-4.1',
+		maxTokens: { input: 1047576, output: 32768 },
 		provider: provider,
 	},
 	{
-		id: 'gpt-5-mini-2025-08-07',
-		name: 'GPT-5 mini',
-		maxTokens: { input: 400000, output: 128000 },
+		id: 'gpt-4.1-2025-04-14',
+		name: 'GPT-4.1 (2025-04-14)',
+		maxTokens: { input: 1047576, output: 32768 },
 		provider: provider,
 		hidden: true,
 	},
 	{
-		id: 'gpt-5-nano',
-		name: 'GPT-5 nano',
-		maxTokens: { input: 400000, output: 128000 },
+		id: 'gpt-4.1-mini',
+		name: 'GPT-4.1 mini',
+		maxTokens: { input: 1047576, output: 32768 },
 		provider: provider,
+		hidden: true,
 	},
 	{
-		id: 'gpt-5-nano-2025-08-07',
-		name: 'GPT-5 nano',
-		maxTokens: { input: 400000, output: 128000 },
+		id: 'gpt-4.1-mini-2025-04-14',
+		name: 'GPT-4.1 mini (2025-04-14)',
+		maxTokens: { input: 1047576, output: 32768 },
+		provider: provider,
+		hidden: true,
+	},
+	{
+		id: 'gpt-4.1-nano',
+		name: 'GPT-4.1 nano',
+		maxTokens: { input: 1047576, output: 32768 },
+		provider: provider,
+		hidden: true,
+	},
+	{
+		id: 'gpt-4.1-nano-2025-04-14',
+		name: 'GPT-4.1 nano (2025-04-14)',
+		maxTokens: { input: 1047576, output: 32768 },
 		provider: provider,
 		hidden: true,
 	},
@@ -152,6 +169,22 @@ export const openAIModels = <T extends OpenAIProviders>(provider: AIProviderDescriptor<T>): AIModel<T>[] => [
 		temperature: null,
 		hidden: true,
 	},
+	{
+		id: 'o3-deep-research',
+		name: 'o3 Deep Research',
+		maxTokens: { input: 200000, output: 100000 },
+		provider: provider,
+		temperature: null,
+		hidden: true,
+	},
+	{
+		id: 'o3-deep-research-2025-06-26',
+		name: 'o3 Deep Research (2025-06-26)',
+		maxTokens: { input: 200000, output: 100000 },
+		provider: provider,
+		temperature: null,
+		hidden: true,
+	},
 	{
 		id: 'o3',
 		name: 'o3',
@@ -195,6 +228,7 @@ export const openAIModels = <T extends OpenAIProviders>(provider: AIProviderDescriptor<T>): AIModel<T>[] => [
 		maxTokens: { input: 200000, output: 100000 },
 		provider: provider,
 		temperature: null,
+		hidden: true,
 	},
 	{
 		id: 'o1-2024-12-17',
@@ -226,6 +260,7 @@ export const openAIModels = <T extends OpenAIProviders>(provider: AIProviderDescriptor<T>): AIModel<T>[] => [
 		maxTokens: { input: 128000, output: 65536 },
 		provider: provider,
 		temperature: null,
+		hidden: true,
 	},
 	{
 		id: 'o1-mini-2024-09-12',
@@ -240,7 +275,6 @@ export const openAIModels = <T extends OpenAIProviders>(provider: AIProviderDescriptor<T>): AIModel<T>[] => [
 		name: 'GPT-4o',
 		maxTokens: { input: 128000, output: 16384 },
 		provider: provider,
-		default: true,
 	},
 	{
 		id: 'gpt-4o-2024-11-20',
@@ -275,6 +309,7 @@ export const openAIModels = <T extends OpenAIProviders>(provider: AIProviderDescriptor<T>): AIModel<T>[] => [
 		name: 'GPT-4o mini',
 		maxTokens: { input: 128000, output: 16384 },
 		provider: provider,
+		hidden: true,
 	},
 	{
 		id: 'gpt-4o-mini-2024-07-18',
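For context on what the diff does: each entry follows the AIModel descriptor shape used in this file (id, name, maxTokens, provider, plus optional default, hidden, and temperature fields), and the commit moves the default flag from GPT-4o to GPT-5 mini while hiding dated snapshots and the new deep-research models. The sketch below shows how a caller might resolve the default and visible models from such a list; the simplified types and the pickDefaultModel helper are illustrative assumptions, not code from this commit or from GitLens.

// Illustrative sketch only -- simplified stand-ins for the real types in model.ts.
interface AIProviderDescriptor<T extends string = string> {
	id: T;
	name: string;
}

interface AIModel<T extends string = string> {
	id: string;
	name: string;
	maxTokens: { input: number; output: number };
	provider: AIProviderDescriptor<T>;
	default?: boolean;
	hidden?: boolean;
	temperature?: number | null;
}

// Hypothetical helper: prefer the entry flagged `default`, falling back to the
// first non-hidden entry -- one plausible way a model picker could use these flags.
function pickDefaultModel<T extends string>(models: AIModel<T>[]): AIModel<T> | undefined {
	return models.find(m => m.default) ?? models.find(m => !m.hidden);
}

// A small hand-copied subset of the post-commit list, enough to exercise the flags.
const provider: AIProviderDescriptor<'openai'> = { id: 'openai', name: 'OpenAI' };
const models: AIModel<'openai'>[] = [
	{ id: 'gpt-5', name: 'GPT-5', maxTokens: { input: 400000, output: 128000 }, provider: provider },
	{ id: 'gpt-5-mini', name: 'GPT-5 mini', maxTokens: { input: 400000, output: 128000 }, provider: provider, default: true },
	{ id: 'gpt-5-mini-2025-08-07', name: 'GPT-5 mini', maxTokens: { input: 400000, output: 128000 }, provider: provider, hidden: true },
	{ id: 'gpt-4o', name: 'GPT-4o', maxTokens: { input: 128000, output: 16384 }, provider: provider },
];

console.log(pickDefaultModel(models)?.id);                 // 'gpt-5-mini' (was 'gpt-4o' before this commit)
console.log(models.filter(m => !m.hidden).map(m => m.id)); // visible model ids only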

0 commit comments
