
Commit 873e047

🧠 feat: Implement O1 Model Support for Max Tokens Handling (danny-avila#4376)
1 parent bdc2fd3 commit 873e047

1 file changed (+11, −3 lines)


api/app/clients/OpenAIClient.js

Lines changed: 11 additions & 3 deletions
@@ -68,6 +68,8 @@ class OpenAIClient extends BaseClient {
 
     /** @type {OpenAIUsageMetadata | undefined} */
     this.usage;
+    /** @type {boolean|undefined} */
+    this.isO1Model;
   }
 
   // TODO: PluginsClient calls this 3x, unneeded
@@ -98,6 +100,8 @@ class OpenAIClient extends BaseClient {
       this.options.modelOptions,
     );
 
+    this.isO1Model = /\bo1\b/i.test(this.modelOptions.model);
+
     this.defaultVisionModel = this.options.visionModel ?? 'gpt-4-vision-preview';
     if (typeof this.options.attachments?.then === 'function') {
       this.options.attachments.then((attachments) => this.checkVisionRequest(attachments));
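
Note on the detection added above: the \bo1\b pattern matches "o1" only as a standalone token, so model names that merely contain those characters inside a longer token are not flagged. A minimal sketch of the check in isolation; the sample model names are illustrative, not taken from the commit:

// Standalone sketch of the o1 detection; the sample names below are hypothetical.
const isO1 = (model) => /\bo1\b/i.test(model);

console.log(isO1('o1-preview')); // true  – "o1" sits between word boundaries
console.log(isO1('o1-mini'));    // true
console.log(isO1('openai/o1'));  // true  – "/" is a non-word character, so the boundary holds
console.log(isO1('gpt-4o'));     // false – no standalone "o1" token

Caching the result on the instance means the later call sites in this commit (message building and request options) read this.isO1Model instead of re-running the regex per call.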
@@ -545,8 +549,7 @@ class OpenAIClient extends BaseClient {
       promptPrefix = this.augmentedPrompt + promptPrefix;
     }
 
-    const isO1Model = /\bo1\b/i.test(this.modelOptions.model);
-    if (promptPrefix && !isO1Model) {
+    if (promptPrefix && this.isO1Model !== true) {
       promptPrefix = `Instructions:\n${promptPrefix.trim()}`;
       instructions = {
         role: 'system',
@@ -575,7 +578,7 @@ class OpenAIClient extends BaseClient {
     };
 
     /** EXPERIMENTAL */
-    if (promptPrefix && isO1Model) {
+    if (promptPrefix && this.isO1Model === true) {
       const lastUserMessageIndex = payload.findLastIndex((message) => message.role === 'user');
       if (lastUserMessageIndex !== -1) {
         payload[
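
This experimental branch routes the prompt prefix into the most recent user message for o1 models instead of emitting the system-role instruction used in the non-o1 path above. The assignment itself is truncated in this hunk, so the sketch below is only an assumption about the overall shape, using a made-up payload and prefix:

// Sketch: fold an instruction prefix into the last user message rather than a system message.
// The { role, content } shape and the sample values are assumptions for illustration.
const promptPrefix = 'Answer concisely.';
const payload = [
  { role: 'user', content: 'Hi' },
  { role: 'assistant', content: 'Hello!' },
  { role: 'user', content: 'Summarize this article.' },
];

const lastUserMessageIndex = payload.findLastIndex((message) => message.role === 'user');
if (lastUserMessageIndex !== -1) {
  payload[lastUserMessageIndex].content = `${promptPrefix}\n${payload[lastUserMessageIndex].content}`;
}
// payload[2].content === 'Answer concisely.\nSummarize this article.'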
@@ -1227,6 +1230,11 @@ ${convo}
       opts.defaultHeaders = { ...opts.defaultHeaders, 'api-key': this.apiKey };
     }
 
+    if (this.isO1Model === true && modelOptions.max_tokens != null) {
+      modelOptions.max_completion_tokens = modelOptions.max_tokens;
+      delete modelOptions.max_tokens;
+    }
+
     if (process.env.OPENAI_ORGANIZATION) {
       opts.organization = process.env.OPENAI_ORGANIZATION;
     }
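
The new block above is the core of the max-tokens handling: when the model is an o1 variant and a max_tokens value is configured, it is forwarded under max_completion_tokens (the parameter o1 models accept) and the original key is removed so it is never sent. A standalone sketch of that remapping, with a hypothetical options object:

// Sketch: remap max_tokens -> max_completion_tokens before the request options are finalized.
function remapMaxTokensForO1(modelOptions, isO1Model) {
  if (isO1Model === true && modelOptions.max_tokens != null) {
    modelOptions.max_completion_tokens = modelOptions.max_tokens;
    delete modelOptions.max_tokens;
  }
  return modelOptions;
}

// Hypothetical usage:
console.log(remapMaxTokensForO1({ model: 'o1-mini', max_tokens: 4096 }, true));
// { model: 'o1-mini', max_completion_tokens: 4096 }

The != null check leaves behavior untouched when no token limit is configured, and non-o1 models keep sending max_tokens as before.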
