6 files changed: +18 −12
@@ -14,6 +14,7 @@ export class HuggingFaceHandler extends BaseProvider implements SingleCompletionHandler {
 	private client: OpenAI
 	private options: ApiHandlerOptions
 	private modelCache: ModelRecord | null = null
+	private readonly providerName = "HuggingFace"
 
 	constructor(options: ApiHandlerOptions) {
 		super()
@@ -69,7 +70,7 @@ export class HuggingFaceHandler extends BaseProvider implements SingleCompletionHandler {
 		try {
 			stream = await this.client.chat.completions.create(params)
 		} catch (error) {
-			throw handleOpenAIError(error, "HuggingFace")
+			throw handleOpenAIError(error, this.providerName)
 		}
 
 		for await (const chunk of stream) {
@@ -103,7 +104,7 @@ export class HuggingFaceHandler extends BaseProvider implements SingleCompletionHandler {
 
 			return response.choices[0]?.message.content || ""
 		} catch (error) {
-			throw handleOpenAIError(error, "HuggingFace")
+			throw handleOpenAIError(error, this.providerName)
 		}
 	}
 

@@ -20,6 +20,7 @@ import { handleOpenAIError } from "./utils/openai-error-handler"
 export class LmStudioHandler extends BaseProvider implements SingleCompletionHandler {
 	protected options: ApiHandlerOptions
 	private client: OpenAI
+	private readonly providerName = "LM Studio"
 
 	constructor(options: ApiHandlerOptions) {
 		super()
@@ -96,7 +97,7 @@ export class LmStudioHandler extends BaseProvider implements SingleCompletionHandler {
 		try {
 			results = await this.client.chat.completions.create(params)
 		} catch (error) {
-			throw handleOpenAIError(error, "LM Studio")
+			throw handleOpenAIError(error, this.providerName)
 		}
 
 		const matcher = new XmlMatcher(
@@ -177,7 +178,7 @@ export class LmStudioHandler extends BaseProvider implements SingleCompletionHandler {
 			try {
 				response = await this.client.chat.completions.create(params)
 			} catch (error) {
-				throw handleOpenAIError(error, "LM Studio")
+				throw handleOpenAIError(error, this.providerName)
 			}
 			return response.choices[0]?.message.content || ""
 		} catch (error) {

@@ -21,6 +21,7 @@ type CompletionUsage = OpenAI.Chat.Completions.ChatCompletionChunk["usage"]
 export class OllamaHandler extends BaseProvider implements SingleCompletionHandler {
 	protected options: ApiHandlerOptions
 	private client: OpenAI
+	private readonly providerName = "Ollama"
 
 	constructor(options: ApiHandlerOptions) {
 		super()
@@ -65,7 +66,7 @@ export class OllamaHandler extends BaseProvider implements SingleCompletionHandler {
 				stream_options: { include_usage: true },
 			})
 		} catch (error) {
-			throw handleOpenAIError(error, "Ollama")
+			throw handleOpenAIError(error, this.providerName)
 		}
 		const matcher = new XmlMatcher(
 			"think",
@@ -123,7 +124,7 @@ export class OllamaHandler extends BaseProvider implements SingleCompletionHandler {
 					stream: false,
 				})
 			} catch (error) {
-				throw handleOpenAIError(error, "Ollama")
+				throw handleOpenAIError(error, this.providerName)
 			}
 			return response.choices[0]?.message.content || ""
 		} catch (error) {

@@ -86,6 +86,7 @@ export class OpenRouterHandler extends BaseProvider implements SingleCompletionHandler {
 	private client: OpenAI
 	protected models: ModelRecord = {}
 	protected endpoints: ModelRecord = {}
+	private readonly providerName = "OpenRouter"
 
 	constructor(options: ApiHandlerOptions) {
 		super()
@@ -166,7 +167,7 @@ export class OpenRouterHandler extends BaseProvider implements SingleCompletionHandler {
 		try {
 			stream = await this.client.chat.completions.create(completionParams)
 		} catch (error) {
-			throw handleOpenAIError(error, "OpenRouter")
+			throw handleOpenAIError(error, this.providerName)
 		}
 
 		let lastUsage: CompletionUsage | undefined = undefined
@@ -269,7 +270,7 @@ export class OpenRouterHandler extends BaseProvider implements SingleCompletionHandler {
 		try {
 			response = await this.client.chat.completions.create(completionParams)
 		} catch (error) {
-			throw handleOpenAIError(error, "OpenRouter")
+			throw handleOpenAIError(error, this.providerName)
 		}
 
 		if ("error" in response) {

@@ -43,6 +43,7 @@ export class RequestyHandler extends BaseProvider implements SingleCompletionHandler {
 	protected models: ModelRecord = {}
 	private client: OpenAI
 	private baseURL: string
+	private readonly providerName = "Requesty"
 
 	constructor(options: ApiHandlerOptions) {
 		super()
@@ -133,7 +134,7 @@ export class RequestyHandler extends BaseProvider implements SingleCompletionHandler {
 		try {
 			stream = await this.client.chat.completions.create(completionParams)
 		} catch (error) {
-			throw handleOpenAIError(error, "Requesty")
+			throw handleOpenAIError(error, this.providerName)
 		}
 		let lastUsage: any = undefined
 
@@ -174,7 +175,7 @@ export class RequestyHandler extends BaseProvider implements SingleCompletionHandler {
 		try {
 			response = await this.client.chat.completions.create(completionParams)
 		} catch (error) {
-			throw handleOpenAIError(error, "Requesty")
+			throw handleOpenAIError(error, this.providerName)
 		}
 		return response.choices[0]?.message.content || ""
 	}

@@ -19,6 +19,7 @@ const XAI_DEFAULT_TEMPERATURE = 0
 export class XAIHandler extends BaseProvider implements SingleCompletionHandler {
 	protected options: ApiHandlerOptions
 	private client: OpenAI
+	private readonly providerName = "xAI"
 
 	constructor(options: ApiHandlerOptions) {
 		super()
@@ -64,7 +65,7 @@ export class XAIHandler extends BaseProvider implements SingleCompletionHandler {
 				...(reasoning && reasoning),
 			})
 		} catch (error) {
-			throw handleOpenAIError(error, "xAI")
+			throw handleOpenAIError(error, this.providerName)
 		}
 
 		for await (const chunk of stream) {
@@ -120,7 +121,7 @@ export class XAIHandler extends BaseProvider implements SingleCompletionHandler {
 
 			return response.choices[0]?.message.content || ""
 		} catch (error) {
-			throw handleOpenAIError(error, "xAI")
+			throw handleOpenAIError(error, this.providerName)
 		}
 	}
 }
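
Every handler in this change applies the same pattern: the provider's display name is declared once as a readonly class field and passed to handleOpenAIError at each call site, instead of repeating the string literal. Below is a minimal self-contained sketch of that pattern; the simplified handleOpenAIError, the ExampleHandler class, and the model name are illustrative assumptions, not the actual implementation from ./utils/openai-error-handler.

import OpenAI from "openai"

// Illustrative stand-in for the shared helper in ./utils/openai-error-handler,
// which this diff does not show: wrap the raw error with the provider's name.
function handleOpenAIError(error: unknown, providerName: string): Error {
	const message = error instanceof Error ? error.message : String(error)
	return new Error(`${providerName} completion error: ${message}`)
}

// Hypothetical handler demonstrating the providerName-field pattern.
class ExampleHandler {
	// Declared once, reused by every call site that reports an error.
	private readonly providerName = "Example"
	private client: OpenAI

	constructor(apiKey: string, baseURL?: string) {
		this.client = new OpenAI({ apiKey, baseURL })
	}

	async completePrompt(prompt: string): Promise<string> {
		let response: OpenAI.Chat.Completions.ChatCompletion
		try {
			response = await this.client.chat.completions.create({
				model: "example-model",
				messages: [{ role: "user", content: prompt }],
			})
		} catch (error) {
			// Mirrors the diff: rethrow through the shared helper with the field.
			throw handleOpenAIError(error, this.providerName)
		}
		return response.choices[0]?.message.content || ""
	}
}

This keeps the user-facing provider label consistent across the streaming and non-streaming paths of each handler.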