Skip to content

Commit 14f27c0

Browse files
committed
split chat v2 api routes
1 parent 9139d39 commit 14f27c0

File tree

3 files changed

+42
-29
lines changed

3 files changed

+42
-29
lines changed

src/client/components/ChatV2/util.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@ export const getCompletionStream = async ({
4444

4545
formData.set('data', JSON.stringify(data))
4646

47-
return postAbortableStream('/ai/stream', formData, abortController)
47+
return postAbortableStream('/ai/stream/v2', formData, abortController)
4848
}
4949

5050
interface GetCourseCompletionStreamProps {
@@ -73,5 +73,5 @@ export const getCourseCompletionStream = async ({ id, system, messages, model, c
7373
const formData = new FormData()
7474
formData.set('data', JSON.stringify(data))
7575

76-
return postAbortableStream(`/ai/stream/`, formData, abortController)
76+
return postAbortableStream(`/ai/stream/v2`, formData, abortController)
7777
}

src/server/routes/openai.ts

Lines changed: 37 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@ import multer from 'multer'
44
import { CourseChatRequest, RequestWithUser } from '../types'
55
import { isError } from '../util/parser'
66
import { calculateUsage, incrementUsage, checkUsage, checkCourseUsage, incrementCourseUsage } from '../services/chatInstances/usage'
7+
import { getCompletionEvents, streamCompletion } from '../util/azure'
78
import { streamResponsesEvents, getResponsesEvents } from '../util/azureV2'
89
import { getMessageContext, getModelContextLimit, getCourseModel, getAllowedModels } from '../util/util'
910
import getEncoding from '../util/tiktoken'
@@ -43,8 +44,9 @@ const fileParsing = async (options: any, req: any) => {
4344
return options.messages
4445
}
4546

46-
openaiRouter.post('/stream', upload.single('file'), async (r, res) => {
47+
openaiRouter.post('/stream/:version?', upload.single('file'), async (r, res) => {
4748
const req = r as RequestWithUser
49+
const { version } = r.params
4850
const { options, courseId } = JSON.parse(req.body.data)
4951
const { model, userConsent } = options
5052
const { user } = req
@@ -98,11 +100,16 @@ openaiRouter.post('/stream', upload.single('file'), async (r, res) => {
98100
return
99101
}
100102

101-
const events = await getResponsesEvents({
102-
model: options.model,
103-
input: options.messages,
104-
stream: options.stream,
105-
})
103+
let events
104+
if (version === 'v2') {
105+
events = await getResponsesEvents({
106+
model: options.model,
107+
input: options.messages,
108+
stream: options.stream,
109+
})
110+
} else {
111+
events = await getCompletionEvents(options)
112+
}
106113

107114
if (isError(events)) {
108115
res.status(424)
@@ -111,7 +118,12 @@ openaiRouter.post('/stream', upload.single('file'), async (r, res) => {
111118

112119
res.setHeader('content-type', 'text/event-stream')
113120

114-
const completion = await streamResponsesEvents(events, encoding, res)
121+
let completion
122+
if (version === 'v2') {
123+
completion = await streamResponsesEvents(events, encoding, res)
124+
} else {
125+
completion = await streamCompletion(events, options, encoding, res)
126+
}
115127

116128
tokenCount += completion.tokenCount
117129

@@ -159,8 +171,8 @@ openaiRouter.post('/stream', upload.single('file'), async (r, res) => {
159171
return
160172
})
161173

162-
openaiRouter.post('/stream/:courseId', upload.single('file'), async (r, res) => {
163-
const { courseId } = r.params
174+
openaiRouter.post('/stream/:courseId/:version?', upload.single('file'), async (r, res) => {
175+
const { courseId, version } = r.params
164176
const req = r as CourseChatRequest
165177
const { options } = JSON.parse(r.body.data)
166178
const { user } = req
@@ -202,11 +214,16 @@ openaiRouter.post('/stream/:courseId', upload.single('file'), async (r, res) =>
202214
return
203215
}
204216

205-
const events = await getResponsesEvents({
206-
model: options.model,
207-
input: options.messages,
208-
stream: options.stream,
209-
})
217+
let events
218+
if (version === 'v2') {
219+
events = await getResponsesEvents({
220+
model: options.model,
221+
input: options.messages,
222+
stream: options.stream,
223+
})
224+
} else {
225+
events = await getCompletionEvents(options)
226+
}
210227

211228
if (isError(events)) {
212229
res.status(424).send(events)
@@ -215,7 +232,12 @@ openaiRouter.post('/stream/:courseId', upload.single('file'), async (r, res) =>
215232

216233
res.setHeader('content-type', 'text/event-stream')
217234

218-
const completion = await streamResponsesEvents(events, encoding, res)
235+
let completion
236+
if (version === 'v2') {
237+
completion = await streamResponsesEvents(events, encoding, res)
238+
} else {
239+
completion = await streamCompletion(events, options, encoding, res)
240+
}
219241

220242
tokenCount += completion.tokenCount
221243

src/server/util/azureV2.ts

Lines changed: 3 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -56,10 +56,7 @@ export const getResponsesEvents = async ({
5656
> => {
5757
const deploymentId = validModels.find((m) => m.name === model)?.deployment
5858

59-
if (!deploymentId)
60-
throw new Error(
61-
`Invalid model: ${model}, not one of ${validModels.map((m) => m.name).join(', ')}`
62-
)
59+
if (!deploymentId) throw new Error(`Invalid model: ${model}, not one of ${validModels.map((m) => m.name).join(', ')}`)
6360

6461
// Mocking disabled because it's difficult to mock an event stream for the responses API.
6562

// if (deploymentId === 'mock') return getMockCompletionEvents()
@@ -80,11 +77,7 @@ export const getResponsesEvents = async ({
8077
}
8178
}
8279

83-
export const streamResponsesEvents = async (
84-
events: Stream<ResponseStreamEvent>,
85-
encoding: Tiktoken,
86-
res: Response
87-
) => {
80+
export const streamResponsesEvents = async (events: Stream<ResponseStreamEvent>, encoding: Tiktoken, res: Response) => {
8881
let tokenCount = 0
8982
const contents = []
9083

@@ -99,9 +92,7 @@ export const streamResponsesEvents = async (
9992
if (err) logger.error(`${event.delta} ${err}`)
10093
})
10194
) {
102-
logger.info(
103-
`${event.delta} res.write returned false, waiting for drain`
104-
)
95+
logger.info(`${event.delta} res.write returned false, waiting for drain`)
10596
res.once('drain', resolve)
10697
} else {
10798
process.nextTick(resolve)

0 commit comments

Comments (0)