@@ -26,14 +26,17 @@ import {
 import {
   OPENAI_API_HOST,
   OPENAI_API_HOST_BACKUP,
+  OPENAI_API_KEY,
+  OPENAI_API_KEY_BACKUP,
   OPENAI_API_TYPE,
   OPENAI_API_VERSION,
   OPENAI_ORGANIZATION,
   SWITCH_BACK_TO_PRIMARY_HOST_TIMEOUT_MS
 } from "@/utils/app/const"

 // Host switching mechanism.
-let currentHost = OPENAI_API_HOST
+let currentHost = ""
+let currentApiKey = ""
 let switchBackToPrimaryHostTime: number | undefined = undefined

 function switchToBackupHost(): void {
@@ -42,24 +45,28 @@ function switchToBackupHost(): void {
       `Switching to backup host: ${OPENAI_API_HOST_BACKUP} for the next ${SWITCH_BACK_TO_PRIMARY_HOST_TIMEOUT_MS / 60000} minutes.`
     )
     currentHost = OPENAI_API_HOST_BACKUP
+    currentApiKey = OPENAI_API_KEY_BACKUP
     switchBackToPrimaryHostTime = Date.now() + SWITCH_BACK_TO_PRIMARY_HOST_TIMEOUT_MS
+  } else {
+    switchBackToPrimaryHostIfNeeded(true)
   }
 }

-function switchBackToPrimaryHostIfNeeded(): void {
-  if (currentHost !== OPENAI_API_HOST && switchBackToPrimaryHostTime && Date.now() >= switchBackToPrimaryHostTime) {
-    console.log(`Switching to primary host: ${OPENAI_API_HOST}`)
+function switchBackToPrimaryHostIfNeeded(forced = false): void {
+  if (forced || !currentHost || (currentHost !== OPENAI_API_HOST && switchBackToPrimaryHostTime && Date.now() >= switchBackToPrimaryHostTime)) {
+    console.log(`Switching back to primary host${forced ? " (forced)" : ""}: ${OPENAI_API_HOST}`)
     currentHost = OPENAI_API_HOST
+    currentApiKey = OPENAI_API_KEY
     switchBackToPrimaryHostTime = undefined
   }
 }

 function createGetModelsUrls(host: string): string {
-  let url = `${host}/v1/models`
+  let url = `${host}/v1/models?api-version=${OPENAI_API_VERSION}`
   if (OPENAI_API_TYPE === "azure") {
     url = `${host}/openai/models?api-version=${OPENAI_API_VERSION}`
   }
-  console.debug(`Get models (${OPENAI_API_TYPE}): ${url}`)
+  console.debug(`Get models (for ${OPENAI_API_TYPE}): ${url}`)
   return url
 }

@@ -73,14 +80,15 @@ async function processModelsResponse(response: Response): Promise<Response> {
   const removeVisibleModels = OPENAI_API_TYPE === "azure" ? ["gpt-35-turbo-16k", "gpt-4", "gpt-4-32k"] : []

   // Find models to display.
-  const models: OpenAIModel[] = json.data.map((model: any) => {
-    return {
-      id: model.id,
-      inputTokenLimit: maxInputTokensForModel(model.id),
-      outputTokenLimit: maxOutputTokensForModel(model.id),
-      isOpenAiReasoningModel: isOpenAIReasoningModel(model.id)
-    }
-  })
+  const models: OpenAIModel[] = json.data
+    .map((model: any) => {
+      return {
+        id: model.id,
+        inputTokenLimit: maxInputTokensForModel(model.id),
+        outputTokenLimit: maxOutputTokensForModel(model.id),
+        isOpenAiReasoningModel: isOpenAIReasoningModel(model.id)
+      }
+    })
     .filter((model: any) => !removeVisibleModels.includes(model.id))
     .concat(addHiddenModels)
     .filter((model: OpenAIModel) => {
@@ -92,7 +100,7 @@ async function processModelsResponse(response: Response): Promise<Response> {
       }
     })
     .sort((a: OpenAIModel, b: OpenAIModel) => a.id.localeCompare(b.id))
-  console.debug(`Found ${models.length} models: ${models.map((model) => model.id).join(", ")}`)
+  console.debug(`Found ${models.length} models`)
   return new Response(JSON.stringify(models), { status: 200 })
 }

@@ -106,18 +114,17 @@ const handler = async (req: Request): Promise<Response> => {
   // Compose URL to get models.
   let url = createGetModelsUrls(currentHost)

-  // Compose HTTP headers.
   const headers = {
     "Content-Type": "application/json",
     ...(OPENAI_API_TYPE === "openai" && {
-      Authorization: `Bearer ${apiKey || process.env.OPENAI_API_KEY}`
+      Authorization: `Bearer ${currentApiKey.length > 0 ? currentApiKey : apiKey}`
     }),
     ...(OPENAI_API_TYPE === "openai" &&
       OPENAI_ORGANIZATION && {
         "OpenAI-Organization": OPENAI_ORGANIZATION
       }),
     ...(OPENAI_API_TYPE === "azure" && {
-      "api-key": `${apiKey || process.env.OPENAI_API_KEY}`
+      "api-key": currentApiKey.length > 0 ? currentApiKey : apiKey
     })
   }

@@ -129,39 +136,40 @@ const handler = async (req: Request): Promise<Response> => {
       return await processModelsResponse(response)
     } else {
       // Primary host response not OK. This should not cause a switch to the backup host.
-      console.error(`Primary host for getting models for '${OPENAI_API_TYPE}' returned an error: ${JSON.stringify(response)}`)
+      console.error(
+        `Primary host for getting models for '${OPENAI_API_TYPE}' returned an error: ${JSON.stringify(response)}`
+      )
       responseInit = { status: 500, statusText: response ? JSON.stringify(response) : "" }
     }
   } catch (error) {
     // Primary host response returns HTTP error.
     console.error(`Primary host for '${OPENAI_API_TYPE}' threw an exception; ${JSON.stringify(error)}`)
-    if (
-      currentHost !== OPENAI_API_HOST_BACKUP &&
-      (!(error instanceof RemoteError) || (error.status >= 500 && error.status < 600))
-    ) {
+    if (!(error instanceof RemoteError) || (error.status >= 500 && error.status < 600)) {
       // Exception was thrown because the primary server (not the backup one) returns an 5xx error.
       console.log(`Switching to backup host due to error: ${JSON.stringify(error)}`)
       switchToBackupHost()

       // Retry with the backup host. Recreate the URL with the new host. HTTP headers remains the same.
-      let backupUrl = createGetModelsUrls(currentHost)
+      let retryUrl = createGetModelsUrls(currentHost)

       try {
-        const backupResponse = await fetch(backupUrl, { headers: headers })
-        if (backupResponse.ok) {
+        const retryResponse = await fetch(retryUrl, { headers: headers })
+        if (retryResponse.ok) {
           // Backup host OK.
-          return await processModelsResponse(backupResponse)
+          return await processModelsResponse(retryResponse)
         } else {
           // Backup host response not OK.
-          console.error(`Backup host for getting models for '${OPENAI_API_TYPE}' returned an error: ${JSON.stringify(backupResponse)}`)
-          responseInit = { status: 500, statusText: backupResponse ? JSON.stringify(backupResponse) : "" }
+          console.error(
+            `Backup host for getting models for '${OPENAI_API_TYPE}' returned an error: ${JSON.stringify(retryResponse)}`
+          )
+          responseInit = { status: 500, statusText: retryResponse ? JSON.stringify(retryResponse) : "" }
         }
-      } catch (backupError) {
+      } catch (retryError) {
         // Backup host response throws an HTTP error.
         console.error(`Backup host for '${OPENAI_API_TYPE}' threw an exception: ${JSON.stringify(error)}`)

         // Return a 5xx error.
-        responseInit = { status: 500, statusText: backupError ? JSON.stringify(backupError) : "" }
+        responseInit = { status: 500, statusText: retryError ? JSON.stringify(retryError) : "" }
       }
     } else {
       // Some other exception. No retry.
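
As context for reviewers, below is a minimal, self-contained TypeScript sketch of the failover timing this commit introduces. The shortened identifiers, the placeholder host values, and the explicit `now` parameter (passed in instead of calling `Date.now()` inside the helpers, to keep the walkthrough deterministic) are illustrative stand-ins rather than part of the actual module, which also swaps `currentApiKey` alongside the host.

// Illustrative sketch only: mirrors the host-failover timing added in this commit,
// with hard-coded stand-ins for the constants normally imported from "@/utils/app/const".
const PRIMARY_HOST = "https://primary.example.com"   // stand-in for OPENAI_API_HOST
const BACKUP_HOST = "https://backup.example.com"     // stand-in for OPENAI_API_HOST_BACKUP
const SWITCH_BACK_TIMEOUT_MS = 10 * 60 * 1000        // stand-in for SWITCH_BACK_TO_PRIMARY_HOST_TIMEOUT_MS

let currentHost = ""
let switchBackTime: number | undefined = undefined

function switchBackToPrimaryIfNeeded(now: number, forced = false): void {
  // Reset to the primary host when forced, on first use, or once the backup window has expired.
  if (forced || !currentHost || (currentHost !== PRIMARY_HOST && switchBackTime && now >= switchBackTime)) {
    currentHost = PRIMARY_HOST
    switchBackTime = undefined
  }
}

function switchToBackup(now: number): void {
  if (currentHost !== BACKUP_HOST) {
    // First failure: move to the backup host for a fixed window.
    currentHost = BACKUP_HOST
    switchBackTime = now + SWITCH_BACK_TIMEOUT_MS
  } else {
    // Failure while already on the backup: force an immediate reset to the primary host.
    switchBackToPrimaryIfNeeded(now, true)
  }
}

// Walk through the timing.
const now = Date.now()
switchBackToPrimaryIfNeeded(now)                          // first use          -> primary
switchToBackup(now)                                       // primary 5xx        -> backup
switchBackToPrimaryIfNeeded(now + 1000)                   // window not expired -> still backup
switchBackToPrimaryIfNeeded(now + SWITCH_BACK_TIMEOUT_MS) // window expired     -> primary again
console.log(currentHost)                                  // logs the primary host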