@@ -4,6 +4,7 @@ import multer from 'multer'
import { CourseChatRequest, RequestWithUser } from '../types'
import { isError } from '../util/parser'
import { calculateUsage, incrementUsage, checkUsage, checkCourseUsage, incrementCourseUsage } from '../services/chatInstances/usage'
+import { getCompletionEvents, streamCompletion } from '../util/azure'
import { streamResponsesEvents, getResponsesEvents } from '../util/azureV2'
import { getMessageContext, getModelContextLimit, getCourseModel, getAllowedModels } from '../util/util'
import getEncoding from '../util/tiktoken'
@@ -43,8 +44,9 @@ const fileParsing = async (options: any, req: any) => {
  return options.messages
}

-openaiRouter.post('/stream', upload.single('file'), async (r, res) => {
+openaiRouter.post('/stream/:version?', upload.single('file'), async (r, res) => {
  const req = r as RequestWithUser
+  const { version } = r.params
  const { options, courseId } = JSON.parse(req.body.data)
  const { model, userConsent } = options
  const { user } = req
@@ -98,11 +100,16 @@ openaiRouter.post('/stream', upload.single('file'), async (r, res) => {
    return
  }

-  const events = await getResponsesEvents({
-    model: options.model,
-    input: options.messages,
-    stream: options.stream,
-  })
+  let events
+  if (version === 'v2') {
+    events = await getResponsesEvents({
+      model: options.model,
+      input: options.messages,
+      stream: options.stream,
+    })
+  } else {
+    events = await getCompletionEvents(options)
+  }

  if (isError(events)) {
    res.status(424)
@@ -111,7 +118,12 @@ openaiRouter.post('/stream', upload.single('file'), async (r, res) => {

  res.setHeader('content-type', 'text/event-stream')

-  const completion = await streamResponsesEvents(events, encoding, res)
+  let completion
+  if (version === 'v2') {
+    completion = await streamResponsesEvents(events, encoding, res)
+  } else {
+    completion = await streamCompletion(events, options, encoding, res)
+  }

  tokenCount += completion.tokenCount

@@ -159,8 +171,8 @@ openaiRouter.post('/stream', upload.single('file'), async (r, res) => {
  return
})

-openaiRouter.post('/stream/:courseId', upload.single('file'), async (r, res) => {
-  const { courseId } = r.params
+openaiRouter.post('/stream/:courseId/:version?', upload.single('file'), async (r, res) => {
+  const { courseId, version } = r.params
  const req = r as CourseChatRequest
  const { options } = JSON.parse(r.body.data)
  const { user } = req
@@ -202,11 +214,16 @@ openaiRouter.post('/stream/:courseId', upload.single('file'), async (r, res) =>
    return
  }

-  const events = await getResponsesEvents({
-    model: options.model,
-    input: options.messages,
-    stream: options.stream,
-  })
+  let events
+  if (version === 'v2') {
+    events = await getResponsesEvents({
+      model: options.model,
+      input: options.messages,
+      stream: options.stream,
+    })
+  } else {
+    events = await getCompletionEvents(options)
+  }

  if (isError(events)) {
    res.status(424).send(events)
@@ -215,7 +232,12 @@ openaiRouter.post('/stream/:courseId', upload.single('file'), async (r, res) =>

  res.setHeader('content-type', 'text/event-stream')

-  const completion = await streamResponsesEvents(events, encoding, res)
+  let completion
+  if (version === 'v2') {
+    completion = await streamResponsesEvents(events, encoding, res)
+  } else {
+    completion = await streamCompletion(events, options, encoding, res)
+  }

  tokenCount += completion.tokenCount

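
For orientation, a rough client-side sketch of how the new optional :version? segment is meant to be exercised: posting to /stream (or /stream/:courseId) takes the getCompletionEvents/streamCompletion path, while appending v2 switches the same handler to the getResponsesEvents/streamResponsesEvents path. This is a sketch only; the /api/ai mount path, the model name, and the exact shape of the options payload are assumptions for illustration and are not taken from this diff.

// Sketch only: '/api/ai' mount path and the options payload shape are assumed.
const streamChat = async (prompt: string, version?: 'v2') => {
  const body = new FormData()
  // The route reads JSON out of the multipart 'data' field (JSON.parse(req.body.data)).
  body.append(
    'data',
    JSON.stringify({
      options: {
        model: 'gpt-4o', // assumed model identifier
        messages: [{ role: 'user', content: prompt }],
        stream: true,
        userConsent: true,
      },
    }),
  )

  // No version segment -> completions path; 'v2' -> Responses API path.
  const url = version ? `/api/ai/stream/${version}` : '/api/ai/stream'
  const res = await fetch(url, { method: 'POST', body })

  // The handler answers with a text/event-stream, so read it incrementally.
  const reader = res.body!.getReader()
  const decoder = new TextDecoder()
  for (;;) {
    const { done, value } = await reader.read()
    if (done) break
    console.log(decoder.decode(value))
  }
}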