@@ -41,7 +41,7 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler
 	private async *handleO1FamilyMessage(
 		modelId: string,
 		systemPrompt: string,
-		messages: Anthropic.Messages.MessageParam[],
+		messages: Anthropic.Messages.MessageParam[]
 	): ApiStream {
 		// o1 supports developer prompt with formatting
 		// o1-preview and o1-mini only support user messages
@@ -63,7 +63,7 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler
 	private async *handleO3FamilyMessage(
 		modelId: string,
 		systemPrompt: string,
-		messages: Anthropic.Messages.MessageParam[],
+		messages: Anthropic.Messages.MessageParam[]
 	): ApiStream {
 		const stream = await this.client.chat.completions.create({
 			model: "o3-mini",
@@ -85,7 +85,7 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler
 	private async *handleDefaultModelMessage(
 		modelId: string,
 		systemPrompt: string,
-		messages: Anthropic.Messages.MessageParam[],
+		messages: Anthropic.Messages.MessageParam[]
 	): ApiStream {
 		const stream = await this.client.chat.completions.create({
 			model: modelId,
@@ -98,7 +98,9 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler
 		yield* this.handleStreamResponse(stream)
 	}

-	private async *yieldResponseData(response: OpenAI.Chat.Completions.ChatCompletion): ApiStream {
+	private async *yieldResponseData(
+		response: OpenAI.Chat.Completions.ChatCompletion
+	): ApiStream {
 		yield {
 			type: "text",
 			text: response.choices[0]?.message.content || "",
@@ -110,7 +112,9 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler
 		}
 	}

-	private async *handleStreamResponse(stream: AsyncIterable<OpenAI.Chat.Completions.ChatCompletionChunk>): ApiStream {
+	private async *handleStreamResponse(
+		stream: AsyncIterable<OpenAI.Chat.Completions.ChatCompletionChunk>
+	): ApiStream {
 		for await (const chunk of stream) {
 			const delta = chunk.choices[0]?.delta
 			if (delta?.content) {
@@ -164,7 +168,7 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler

 	private getO1CompletionOptions(
 		modelId: string,
-		prompt: string,
+		prompt: string
 	): OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming {
 		return {
 			model: modelId,
@@ -174,7 +178,7 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler

 	private getO3CompletionOptions(
 		modelId: string,
-		prompt: string,
+		prompt: string
 	): OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming {
 		return {
 			model: "o3-mini",
@@ -185,7 +189,7 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler

 	private getDefaultCompletionOptions(
 		modelId: string,
-		prompt: string,
+		prompt: string
 	): OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming {
 		return {
 			model: modelId,