Skip to content

Commit 5fd5c58

Browse files
committed
fix: Possibly wrong temperature used when prompt has temperature specified
1 parent 78e57d4 commit 5fd5c58

File tree

5 files changed

+10
-8
lines changed

5 files changed

+10
-8
lines changed

src/client/components/ChatV2/ChatV2.tsx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -194,6 +194,7 @@ const ChatV2Content = () => {
194194

195195
const generationInfo: MessageGenerationInfo = {
196196
model: acualModel,
197+
temperature: modelTemperature,
197198
promptInfo,
198199
}
199200

@@ -204,7 +205,6 @@ const ChatV2Content = () => {
204205
options: {
205206
generationInfo,
206207
chatMessages: newMessages,
207-
modelTemperature,
208208
saveConsent,
209209
},
210210
courseId,

src/server/routes/ai/v3.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -142,7 +142,7 @@ router.post('/stream', upload.single('file'), async (r, res) => {
142142
systemMessage,
143143
promptMessages: prompt?.messages,
144144
model: prompt?.model ?? generationInfo.model,
145-
temperature: prompt?.temperature ?? options.modelTemperature,
145+
temperature: prompt?.temperature ?? generationInfo.temperature ?? undefined,
146146
tools,
147147
writeEvent: async (event: ChatEvent) => {
148148
await new Promise((resolve) => {

src/server/services/langchain/MockModel.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -11,9 +11,9 @@ import { StructuredTool } from '@langchain/core/tools'
1111
* FakeStreamingChatModel echoes the first input message out of the box.
1212
*/
1313
export class MockModel extends FakeStreamingChatModel {
14-
temperature: number
14+
temperature?: number | null
1515

16-
constructor({ tools, temperature }: { tools: StructuredTool[]; temperature: number }) {
16+
constructor({ tools, temperature }: { tools: StructuredTool[]; temperature?: number | null }) {
1717
super({
1818
sleep: 5,
1919
})

src/server/services/langchain/chat.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ type ChatModel = Runnable<BaseLanguageModelInput, AIMessageChunk, BaseChatModelC
2323
* @param temperature The temperature for the model's responses.
2424
* @returns A chat model instance.
2525
*/
26-
const getChatModel = (modelConfig: (typeof validModels)[number], tools: StructuredTool[], temperature: number): ChatModel => {
26+
const getChatModel = (modelConfig: (typeof validModels)[number], tools: StructuredTool[], temperature?: number): ChatModel => {
2727
const chatModel =
2828
modelConfig.name === 'mock'
2929
? new MockModel({ tools, temperature })
@@ -78,7 +78,7 @@ export const streamChat = async ({
7878
user,
7979
}: {
8080
model: ValidModelName
81-
temperature: number
81+
temperature?: number
8282
systemMessage: string
8383
chatMessages: ChatMessage[]
8484
promptMessages?: Message[]

src/shared/chat.ts

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,10 @@ export type ChatMessage = UserMessage | AssistantMessage
5858
export type ChatRole = ChatMessage['role']
5959

6060
export const MessageGenerationInfoSchema = z.object({
61-
model: z.union(validModels.map((m) => z.literal(m.name))),
61+
// May be overridden by prompt
62+
model: ValidModelNameSchema,
63+
// May be overridden by prompt
64+
temperature: z.number().min(0).max(1).optional().nullable(),
6265
promptInfo: z.discriminatedUnion('type', [
6366
z.object({
6467
type: z.literal('saved'),
@@ -87,7 +90,6 @@ export const PostStreamSchemaV3 = z.object({
8790
chatMessages: z.array(ChatMessageSchema),
8891
generationInfo: MessageGenerationInfoSchema,
8992
userConsent: z.boolean().optional(),
90-
modelTemperature: z.number().min(0).max(2),
9193
saveConsent: z.boolean().optional(),
9294
courseId: z.string().optional(),
9395
}),

0 commit comments

Comments
 (0)