
Commit b84b355

refactor chatV2 responses API config for courseId and lint fix
1 parent 4f9351e commit b84b355

10 files changed: +110 -83 lines changed

src/client/components/Chat/ModelSelector.tsx

Lines changed: 1 addition & 3 deletions
@@ -31,6 +31,4 @@ const ModelSelector = ({ currentModel, setModel, models }: { currentModel: strin
   )
 }
 
-
-
-export default ModelSelector
+export default ModelSelector

src/client/components/Chat/Status.tsx

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 import { useTranslation } from 'react-i18next'
 import { Box, Typography, MenuItem, FormControl, Select, SelectChangeEvent, InputLabel } from '@mui/material'
 import { FREE_MODEL } from '../../../config'
-import ModelSelector from './ModelSelector';
+import ModelSelector from './ModelSelector'
 
 const Status = ({ model, setModel, models, usage, limit }: { model: string; setModel: (model: string) => void; models: string[]; usage: number; limit: number }) => {
   const { t } = useTranslation()

src/client/components/ChatV2/ChatBox.tsx

Lines changed: 6 additions & 7 deletions
@@ -1,9 +1,9 @@
 import { Send } from '@mui/icons-material'
 import { Box, Container, IconButton, Paper, TextField, Typography } from '@mui/material'
 import { useEffect, useState } from 'react'
-import useUserStatus from '../../hooks/useUserStatus';
-import { useParams } from 'react-router-dom';
-import { useTranslation } from 'react-i18next';
+import useUserStatus from '../../hooks/useUserStatus'
+import { useParams } from 'react-router-dom'
+import { useTranslation } from 'react-i18next'
 
 export const ChatBox = ({ disabled, onSubmit }: { disabled: boolean; onSubmit: (message: string) => void }) => {
   const { courseId } = useParams()
@@ -23,8 +23,8 @@ export const ChatBox = ({ disabled, onSubmit }: { disabled: boolean; onSubmit: (
     console.log('userStatus', userStatus)
   })
 
-  if(statusLoading) {
-    return (<p>loading</p>)
+  if (statusLoading) {
+    return <p>loading</p>
   }
   return (
     <Container
@@ -73,13 +73,12 @@ export const ChatBox = ({ disabled, onSubmit }: { disabled: boolean; onSubmit: (
           },
         }}
       />
-
+
       <Box>
         <Typography variant="body1" style={{ padding: '0.5rem', fontSize: '0.875rem' }}>
           {userStatus.usage} / {userStatus.limit} {t('status:tokensUsed')}
         </Typography>
       </Box>
-
     </Paper>
   </Container>
 )

src/client/components/ChatV2/ChatV2.tsx

Lines changed: 0 additions & 2 deletions
@@ -168,7 +168,6 @@ export const ChatV2 = () => {
         flexDirection: 'column',
       }}
     >
-
       <SettingsModal open={settingsModalOpen} setOpen={setSettingsModalOpen}></SettingsModal>
       <Box sx={{ display: 'flex', gap: '1rem' }}>
         {disclaimerInfo && <Disclaimer disclaimer={disclaimerInfo} />}
@@ -177,7 +176,6 @@ export const ChatV2 = () => {
         <IconButton onClick={() => setSettingsModalOpen(true)} title="Settings">
           <Settings></Settings>
         </IconButton>
-
       </Box>
       <Box ref={chatContainerRef}>
         <Conversation messages={messages} completion={completion} />
Lines changed: 7 additions & 16 deletions
@@ -1,14 +1,10 @@
-import { Close } from "@mui/icons-material"
-import { Box, IconButton, Modal, Typography } from "@mui/material"
+import { Close } from '@mui/icons-material'
+import { Box, IconButton, Modal, Typography } from '@mui/material'
 
-
-
-
-
-export const SettingsModal = ({open, setOpen}) => {
-  return(
+export const SettingsModal = ({ open, setOpen }) => {
+  return (
     <Modal open={open} onClose={() => setOpen(false)}>
-      <Box
+      <Box
         sx={{
           position: 'absolute',
           top: '50%',
@@ -21,18 +17,13 @@ export const SettingsModal = ({open, setOpen}) => {
           p: 4,
         }}
       >
-        <IconButton
-          onClick={() => setOpen(false)}
-          sx={{ position: 'absolute', top: 8, right: 8, color: 'grey.500' }}
-        >
+        <IconButton onClick={() => setOpen(false)} sx={{ position: 'absolute', top: 8, right: 8, color: 'grey.500' }}>
          <Close></Close>
        </IconButton>
        <Typography id="modal-title" variant="h6" component="h2">
          Settings
        </Typography>
-
-
      </Box>
    </Modal>
  )
-}
+}

src/server/routes/openai.ts

Lines changed: 8 additions & 5 deletions
@@ -5,7 +5,7 @@ import { CourseChatRequest, RequestWithUser } from '../types'
 import { isError } from '../util/parser'
 import { calculateUsage, incrementUsage, checkUsage, checkCourseUsage, incrementCourseUsage } from '../services/chatInstances/usage'
 import { getCompletionEvents, streamCompletion } from '../util/azure/client'
-import { ResponsesClient } from '../util/azure/clientV2'
+import { ResponsesClient } from '../util/azure/ResponsesAPI'
 import { getMessageContext, getModelContextLimit, getCourseModel, getAllowedModels } from '../util/util'
 import getEncoding from '../util/tiktoken'
 import logger from '../util/logger'
@@ -100,7 +100,9 @@ openaiRouter.post('/stream/:version?', upload.single('file'), async (r, res) =>
     return
   }
 
-  const responsesClient = new ResponsesClient(model)
+  const responsesClient = new ResponsesClient({
+    model,
+  })
 
   let events
   if (version === 'v2') {
@@ -119,7 +121,6 @@ openaiRouter.post('/stream/:version?', upload.single('file'), async (r, res) =>
   if (version === 'v2') {
     completion = await responsesClient.handleResponse({
       events,
-      prevMessages: options.messages,
       encoding,
       res,
     })
@@ -216,7 +217,10 @@ openaiRouter.post('/stream/:courseId/:version?', upload.single('file'), async (r
     return
   }
 
-  const responsesClient = new ResponsesClient(model)
+  const responsesClient = new ResponsesClient({
+    model,
+    courseId,
+  })
 
   let events
   if (version === 'v2') {
@@ -236,7 +240,6 @@ openaiRouter.post('/stream/:courseId/:version?', upload.single('file'), async (r
   if (version === 'v2') {
     completion = await responsesClient.handleResponse({
       events,
-      prevMessages: options.messages,
       encoding,
       res,
     })

src/server/util/azure/clientV2.ts renamed to src/server/util/azure/ResponsesAPI.ts

Lines changed: 28 additions & 39 deletions
@@ -1,6 +1,6 @@
 import { Tiktoken } from '@dqbd/tiktoken'
 import { Response } from 'express'
-import { isError } from '../../util/parser'
+import { isError } from '../parser'
 
 import { AZURE_RESOURCE, AZURE_API_KEY } from '../config'
 import { validModels, inProduction } from '../../../config'
@@ -11,16 +11,10 @@ import { AzureOpenAI } from 'openai'
 
 // import { EventStream } from '@azure/openai'
 import { Stream } from 'openai/streaming'
-import {
-  FileSearchTool,
-  FunctionTool,
-  ResponseInput,
-  ResponseInputItem,
-  ResponseStreamEvent,
-} from 'openai/resources/responses/responses'
+import { FileSearchTool, FunctionTool, ResponseInput, ResponseInputItem, ResponseStreamEvent } from 'openai/resources/responses/responses'
 
-// import { ohtuRAGTest } from './functionTools'
-import { fileSearchTest } from './fileSearchTools'
+import { courseAssistants, type CourseAssistant } from './courseAssistants'
+import { createFileSearchTool } from './util'
 
 const endpoint = `https://${AZURE_RESOURCE}.openai.azure.com/`
 
@@ -37,31 +31,36 @@ const client = getAzureOpenAIClient(process.env.GPT_4O)
 export class ResponsesClient {
   model: string
   instructions: string
-  tools: (FunctionTool | FileSearchTool)[]
+  tools: FileSearchTool[]
 
-  constructor(model: string, instructions?: string) {
+  constructor({ model, courseId }: { model: string; courseId?: string }) {
     const deploymentId = validModels.find((m) => m.name === model)?.deployment
 
-    if (!deploymentId)
-      throw new Error(
-        `Invalid model: ${model}, not one of ${validModels.map((m) => m.name).join(', ')}`
-      )
+    if (!deploymentId) throw new Error(`Invalid model: ${model}, not one of ${validModels.map((m) => m.name).join(', ')}`)
+
+    let courseAssistant: CourseAssistant
+
+    if (courseId) {
+      courseAssistant = courseAssistants.find((assistant) => assistant.course_id === courseId)
+
+      if (!courseAssistant) throw new Error(`No course assistant found for course ID: ${courseId}`)
+    } else {
+      courseAssistant = courseAssistants.find((assistant) => assistant.name === 'default')
+    }
 
     this.model = deploymentId
-    this.instructions =
-      instructions ||
-      'Olet ohjelmistotuotanto kurssin avustaja. Jos käyttäjä kysyy jotain, niin arvioi ensin liittyykö se ohjelmistotuotannon kurssiin. Jos liittyy, niin toteuta file_search. jos et löydä sopivia tiedostoja, niin sano että haulla ei löytynyt mitään. Jos käyttäjän viesti ei liittynyt ohjelmistotuotannon kurssiin, niin kysy ystävällisesti voitko auttaa jotenkin muuten kurssimateriaalien suhteen.'
-    this.tools = [
-      // ohtuRAGTest.definition,
-      fileSearchTest.definition,
-    ]
+    this.instructions = courseAssistant.assistant_instruction
+
+    const fileSearchTool = courseId
+      ? createFileSearchTool({
+          vectorStoreId: courseAssistant.vector_store_id,
+        })
+      : null
+
+    this.tools = [fileSearchTool]
   }
 
-  async createResponse({
-    input,
-  }: {
-    input: ResponseInput
-  }): Promise<Stream<ResponseStreamEvent> | APIError> {
+  async createResponse({ input }: { input: ResponseInput }): Promise<Stream<ResponseStreamEvent> | APIError> {
     try {
       return await client.responses.create({
         model: this.model,
@@ -79,17 +78,7 @@ export class ResponsesClient {
     }
   }
 
-  async handleResponse({
-    events,
-    prevMessages,
-    encoding,
-    res,
-  }: {
-    events: Stream<ResponseStreamEvent>
-    prevMessages: ResponseInput
-    encoding: Tiktoken
-    res: Response
-  }) {
+  async handleResponse({ events, encoding, res }: { events: Stream<ResponseStreamEvent>; encoding: Tiktoken; res: Response }) {
     let tokenCount = 0
     const contents = []
 
Lines changed: 23 additions & 0 deletions
@@ -0,0 +1,23 @@
+export interface CourseAssistant {
+  course_id: string
+  name: string
+  assistant_instruction: string
+  vector_store_id: string
+}
+
+export const courseAssistants = [
+  {
+    // default assistant, without course
+    course_id: null,
+    name: 'default',
+    assistant_instruction: 'Olet avualias avustaja.',
+    vector_store_id: null,
+  },
+  {
+    course_id: '123',
+    name: 'ohtu-kurssi',
+    assistant_instruction:
+      'Olet ohjelmistotuotanto kurssin avustaja. Jos käyttäjä kysyy jotain, niin arvioi ensin liittyykö se ohjelmistotuotannon kurssiin. Jos liittyy, niin toteuta file_search. jos et löydä sopivia tiedostoja, niin sano että haulla ei löytynyt mitään. Jos käyttäjän viesti ei liittynyt ohjelmistotuotannon kurssiin, niin kysy ystävällisesti voitko auttaa jotenkin muuten kurssimateriaalien suhteen.',
+    vector_store_id: 'vs_Lsyd0uMbgeT8lS9pnxZQEl3c',
+  },
+] as const satisfies CourseAssistant[]
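
For reference, this is how the ResponsesClient constructor above resolves an entry from this table; the values are taken from the file and the lookups mirror the constructor logic:

// With a courseId, match on course_id; the constructor throws if nothing matches.
const ohtu = courseAssistants.find((assistant) => assistant.course_id === '123')
// -> name: 'ohtu-kurssi', vector_store_id: 'vs_Lsyd0uMbgeT8lS9pnxZQEl3c'

// Without a courseId, fall back to the entry named 'default', which has no
// course_id and no vector store, so no file search tool is created.
const fallback = courseAssistants.find((assistant) => assistant.name === 'default')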

src/server/util/azure/functionTools.ts

Lines changed: 4 additions & 10 deletions
@@ -10,8 +10,7 @@ export const functionCallTest: functionCallObject = {
   definition: {
     type: 'function',
     name: 'test_knowledge_retrieval',
-    description:
-      'Test tool for knowledge retrieval. Always call this when user says TEST-RAG',
+    description: 'Test tool for knowledge retrieval. Always call this when user says TEST-RAG',
     parameters: {
       type: 'object',
       properties: {
@@ -25,15 +24,11 @@ export const functionCallTest: functionCallObject = {
     },
     strict: true, // or true, depending on your requirements
   },
-  function: async (
-    client: AzureOpenAI,
-    query: string
-  ): Promise<{ query: string; result: string }> => {
+  function: async (client: AzureOpenAI, query: string): Promise<{ query: string; result: string }> => {
     // Simulate a tool function that returns a simple message
     return {
       query,
-      result:
-        'This is a test result from the test tool. The secret is: Chili kastike',
+      result: 'This is a test result from the test tool. The secret is: Chili kastike',
     }
   },
 }
@@ -43,8 +38,7 @@ export const ohtuRAGTest: functionCallObject = {
   definition: {
     type: 'function',
     name: 'ohtu_retrieval',
-    description:
-      'Helsingin yliopiston ohjelmistotuotannon kurssimateriaalin haku funktio. Kutsu tätä kun käyttäjä haluaa tietoa kurssiin liittyen. Muuten älä kutsu tätä.',
+    description: 'Helsingin yliopiston ohjelmistotuotannon kurssimateriaalin haku funktio. Kutsu tätä kun käyttäjä haluaa tietoa kurssiin liittyen. Muuten älä kutsu tätä.',
     parameters: {
       type: 'object',
       properties: {

src/server/util/azure/util.ts

Lines changed: 32 additions & 0 deletions
@@ -0,0 +1,32 @@
+import { FileSearchTool } from 'openai/resources/responses/responses'
+import { ComparisonFilter, CompoundFilter } from 'openai/resources/shared'
+
+/**
+ * Creates a file search tool configuration for Azure OpenAI for a specific vector store.
+ *
+ * @param vectorStoreId - The ID of the vector store to search.
+ * @param maxResults - The maximum number of results to return. Default is 5.
+ * @param filters - Optional filters to apply to the search results. Check types for `ComparisonFilter` and `CompoundFilter` from OpenAI resources.
+ * @param rankingOptions - Optional ranking options to apply to the search results. Check `FileSearchTool.RankingOptions` from OpenAI resources.
+ *
+ * @returns An object representing the file search tool configuration.
+ */
+export const createFileSearchTool = ({
+  vectorStoreId,
+  maxResults = 5,
+  filters,
+  rankingOptions,
+}: {
+  vectorStoreId: string
+  maxResults?: number
+  filters?: ComparisonFilter | CompoundFilter
+  rankingOptions?: FileSearchTool.RankingOptions
+}): FileSearchTool => {
+  return {
+    type: 'file_search',
+    vector_store_ids: [vectorStoreId],
+    max_num_results: maxResults,
+    filters,
+    ranking_options: rankingOptions,
+  }
+}
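
As used in the ResponsesClient constructor, a call with only a vector store ID is enough; the optional arguments below are illustrative assumptions (the filter shape follows OpenAI's ComparisonFilter type):

// Minimal call, as in the ResponsesClient constructor.
const tool = createFileSearchTool({ vectorStoreId: 'vs_Lsyd0uMbgeT8lS9pnxZQEl3c' })

// With the optional knobs: cap results and filter on a metadata attribute.
const filteredTool = createFileSearchTool({
  vectorStoreId: 'vs_Lsyd0uMbgeT8lS9pnxZQEl3c',
  maxResults: 3,
  filters: { type: 'eq', key: 'course', value: 'ohtu' },
})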
