
Commit c13cb2a

refactor: extract Azure AI inference path to constant to avoid duplication
1 parent: 98d7040


src/api/providers/openai.ts

Lines changed: 9 additions & 9 deletions
@@ -25,13 +25,17 @@ export const defaultHeaders = {
 
 export interface OpenAiHandlerOptions extends ApiHandlerOptions {}
 
+const AZURE_AI_INFERENCE_PATH = "/models/chat/completions"
+
 export class OpenAiHandler extends BaseProvider implements SingleCompletionHandler {
 	protected options: OpenAiHandlerOptions
 	private client: OpenAI
+	private isAzure: boolean
 
 	constructor(options: OpenAiHandlerOptions) {
 		super()
 		this.options = options
+		this.isAzure = options.openAiUseAzure ?? false
 
 		const baseURL = this.options.openAiBaseUrl ?? "https://api.openai.com/v1"
 		const apiKey = this.options.openAiApiKey ?? "not-provided"
@@ -83,7 +87,6 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 			urlHost = ""
 		}
 		const isAzureAiInference = urlHost.endsWith(".services.ai.azure.com")
-		const azureAiInferencePath = "/models/chat/completions" // Path for Azure AI Inference
 		const deepseekReasoner = modelId.includes("deepseek-reasoner") || enabledR1Format
 		const ark = modelUrl.includes(".volces.com")
 		if (modelId.startsWith("o3-mini")) {
@@ -154,7 +157,7 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 
 			const stream = await this.client.chat.completions.create(
 				requestOptions,
-				isAzureAiInference ? { path: azureAiInferencePath } : {},
+				isAzureAiInference ? { path: AZURE_AI_INFERENCE_PATH } : {},
 			)
 
 			const matcher = new XmlMatcher(
@@ -210,7 +213,7 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 
 			const response = await this.client.chat.completions.create(
 				requestOptions,
-				isAzureAiInference ? { path: azureAiInferencePath } : {},
+				isAzureAiInference ? { path: AZURE_AI_INFERENCE_PATH } : {},
 			)
 
 			yield {
@@ -246,15 +249,14 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 				urlHost = ""
 			}
 			const isAzureAiInference = urlHost.endsWith(".services.ai.azure.com")
-			const azureAiInferencePath = "/models/chat/completions" // Path for Azure AI Inference
 			const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
 				model: this.getModel().id,
 				messages: [{ role: "user", content: prompt }],
 			}
 
 			const response = await this.client.chat.completions.create(
 				requestOptions,
-				isAzureAiInference ? { path: azureAiInferencePath } : {},
+				isAzureAiInference ? { path: AZURE_AI_INFERENCE_PATH } : {},
 			)
 			return response.choices[0]?.message.content || ""
 		} catch (error) {
@@ -279,7 +281,6 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 			methodUrlHost = ""
 		}
 		const methodIsAzureAiInference = methodUrlHost.endsWith(".services.ai.azure.com")
-		const methodAzureAiInferencePath = "/models/chat/completions"
 
 		const stream = await this.client.chat.completions.create(
 			{
@@ -295,7 +296,7 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 				stream_options: { include_usage: true },
 				reasoning_effort: this.getModel().info.reasoningEffort,
 			},
-			methodIsAzureAiInference ? { path: methodAzureAiInferencePath } : {},
+			methodIsAzureAiInference ? { path: AZURE_AI_INFERENCE_PATH } : {},
 		)
 
 		yield* this.handleStreamResponse(stream)
@@ -319,11 +320,10 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 			methodUrlHost = ""
 		}
 		const methodIsAzureAiInference = methodUrlHost.endsWith(".services.ai.azure.com")
-		const methodAzureAiInferencePath = "/models/chat/completions"
 
 		const response = await this.client.chat.completions.create(
 			requestOptions,
-			methodIsAzureAiInference ? { path: methodAzureAiInferencePath } : {},
+			methodIsAzureAiInference ? { path: AZURE_AI_INFERENCE_PATH } : {},
 		)
 
 		yield {
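
For context, the pattern this commit consolidates can be sketched standalone. The sketch below relies on what the diff itself shows: the openai Node SDK accepts a per-request options object (the second argument to create()) with a `path` override. The helper `isAzureAiInferenceUrl`, the `completePrompt` wrapper, and the `gpt-4o` model id are illustrative assumptions, not part of the commit.

// A minimal sketch, assuming the openai Node SDK (v4) and Node 18+.
import OpenAI from "openai"

// Single source of truth for the Azure AI Inference route, as in the commit.
const AZURE_AI_INFERENCE_PATH = "/models/chat/completions"

// Hypothetical helper mirroring the diff's repeated
// `urlHost.endsWith(".services.ai.azure.com")` checks.
function isAzureAiInferenceUrl(baseUrl: string): boolean {
	try {
		return new URL(baseUrl).host.endsWith(".services.ai.azure.com")
	} catch {
		return false // invalid base URL: treat as a plain OpenAI-compatible endpoint
	}
}

async function completePrompt(baseURL: string, apiKey: string, prompt: string): Promise<string> {
	const client = new OpenAI({ baseURL, apiKey })
	const response = await client.chat.completions.create(
		{ model: "gpt-4o", messages: [{ role: "user", content: prompt }] },
		// Azure AI Inference serves chat completions under /models/chat/completions
		// rather than the SDK's default route, so the path is overridden per request.
		isAzureAiInferenceUrl(baseURL) ? { path: AZURE_AI_INFERENCE_PATH } : {},
	)
	return response.choices[0]?.message.content ?? ""
}

Centralizing the route in AZURE_AI_INFERENCE_PATH means the five call sites touched by this diff can no longer drift apart if the Azure AI Inference path ever changes.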
