Skip to content

Commit 1fb98b4

Browse files
committed
update
1 parent 62eb4eb commit 1fb98b4

File tree

2 files changed

+11
-19
lines changed

2 files changed

+11
-19
lines changed

src/server/routes/openai.ts

Lines changed: 4 additions & 14 deletions
Original file line number | Diff line number | Diff line change
@@ -132,7 +132,7 @@ openaiRouter.post('/stream', upload.single('file'), async (r, res) => {
132132
return
133133
}
134134

135-
const events = await getCompletionEventsV2(options as AzureOptionsV2)
135+
const events = await getCompletionEventsV2(options)
136136

137137
if (isError(events)) {
138138
res.status(424)
@@ -141,12 +141,7 @@ openaiRouter.post('/stream', upload.single('file'), async (r, res) => {
141141

142142
res.setHeader('content-type', 'text/event-stream')
143143

144-
const completion = await streamCompletionV2(
145-
events,
146-
options as AzureOptionsV2,
147-
encoding,
148-
res
149-
)
144+
const completion = await streamCompletionV2(events, encoding, res)
150145

151146
tokenCount += completion.tokenCount
152147

@@ -241,7 +236,7 @@ openaiRouter.post(
241236
return
242237
}
243238

244-
const events = await getCompletionEventsV2(options as AzureOptionsV2)
239+
const events = await getCompletionEventsV2(options)
245240

246241
if (isError(events)) {
247242
res.status(424).send(events)
@@ -250,12 +245,7 @@ openaiRouter.post(
250245

251246
res.setHeader('content-type', 'text/event-stream')
252247

253-
const completion = await streamCompletionV2(
254-
events,
255-
options as AzureOptionsV2,
256-
encoding,
257-
res
258-
)
248+
const completion = await streamCompletionV2(events, encoding, res)
259249

260250
tokenCount += completion.tokenCount
261251

src/server/util/azureV2.ts

Lines changed: 7 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -9,7 +9,10 @@ import { APIError, AzureOptionsV2 } from '../types'
99
import { AzureOpenAI } from 'openai'
1010
import type { EventStream } from '@azure/openai'
1111
import type { Stream } from 'openai/streaming'
12-
import type { ChatCompletionChunk } from 'openai/resources/chat'
12+
import type {
13+
ChatCompletionChunk,
14+
ChatCompletionCreateParamsStreaming,
15+
} from 'openai/resources/chat'
1316

1417
const endpoint = `https://${AZURE_RESOURCE}.openai.azure.com/`
1518

@@ -61,8 +64,8 @@ const getMockCompletionEvents: () => Promise<
6164
export const getCompletionEventsV2 = async ({
6265
model,
6366
messages,
64-
options,
65-
}: AzureOptionsV2): Promise<
67+
stream,
68+
}: ChatCompletionCreateParamsStreaming): Promise<
6669
Stream<ChatCompletionChunk> | EventStream<ChatCompletionChunk> | APIError
6770
> => {
6871
const deploymentId = validModels.find((m) => m.name === model)?.deployment
@@ -78,7 +81,7 @@ export const getCompletionEventsV2 = async ({
7881
const events = await client.chat.completions.create({
7982
messages,
8083
model: deploymentId,
81-
stream: true,
84+
stream,
8285
})
8386

8487
return events
@@ -91,7 +94,6 @@ export const getCompletionEventsV2 = async ({
9194

9295
export const streamCompletionV2 = async (
9396
events: Stream<ChatCompletionChunk> | EventStream<ChatCompletionChunk>,
94-
options: AzureOptionsV2,
9597
encoding: Tiktoken,
9698
res: Response
9799
) => {

0 commit comments

Comments (0)