@@ -41,7 +41,7 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler
 	private async *handleO1FamilyMessage(
 		modelId: string,
 		systemPrompt: string,
-		messages: Anthropic.Messages.MessageParam[]
+		messages: Anthropic.Messages.MessageParam[],
 	): ApiStream {
 		// o1 supports developer prompt with formatting
 		// o1-preview and o1-mini only support user messages
@@ -63,7 +63,7 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler
 	private async *handleO3FamilyMessage(
 		modelId: string,
 		systemPrompt: string,
-		messages: Anthropic.Messages.MessageParam[]
+		messages: Anthropic.Messages.MessageParam[],
 	): ApiStream {
 		const stream = await this.client.chat.completions.create({
 			model: "o3-mini",
@@ -85,7 +85,7 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler
 	private async *handleDefaultModelMessage(
 		modelId: string,
 		systemPrompt: string,
-		messages: Anthropic.Messages.MessageParam[]
+		messages: Anthropic.Messages.MessageParam[],
 	): ApiStream {
 		const stream = await this.client.chat.completions.create({
 			model: modelId,
@@ -98,9 +98,7 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler
 		yield* this.handleStreamResponse(stream)
 	}
 
-	private async *yieldResponseData(
-		response: OpenAI.Chat.Completions.ChatCompletion
-	): ApiStream {
+	private async *yieldResponseData(response: OpenAI.Chat.Completions.ChatCompletion): ApiStream {
 		yield {
 			type: "text",
 			text: response.choices[0]?.message.content || "",
@@ -112,9 +110,7 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler
 		}
 	}
 
-	private async *handleStreamResponse(
-		stream: AsyncIterable<OpenAI.Chat.Completions.ChatCompletionChunk>
-	): ApiStream {
+	private async *handleStreamResponse(stream: AsyncIterable<OpenAI.Chat.Completions.ChatCompletionChunk>): ApiStream {
 		for await (const chunk of stream) {
 			const delta = chunk.choices[0]?.delta
 			if (delta?.content) {
@@ -168,7 +164,7 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler
 
 	private getO1CompletionOptions(
 		modelId: string,
-		prompt: string
+		prompt: string,
 	): OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming {
 		return {
 			model: modelId,
@@ -178,7 +174,7 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler
 
 	private getO3CompletionOptions(
 		modelId: string,
-		prompt: string
+		prompt: string,
 	): OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming {
 		return {
 			model: "o3-mini",
@@ -189,7 +185,7 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler
 
 	private getDefaultCompletionOptions(
 		modelId: string,
-		prompt: string
+		prompt: string,
 	): OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming {
 		return {
 			model: modelId,