diff --git a/docs/src/public/sw.js b/docs/src/public/sw.js
index 2255182ef..35c8221eb 100644
--- a/docs/src/public/sw.js
+++ b/docs/src/public/sw.js
@@ -39,36 +39,61 @@ const getMockData = (messages) => {
   return template.replace('{{lastMessage}}', lastMessage)
 }
 
+// Resolves after a random delay in [start, end] milliseconds; delay(x) waits exactly x ms.
+function delay(start = 100, end) {
+  end = end || start
+  return new Promise((resolve) => setTimeout(resolve, Math.random() * (end - start) + start))
+}
+
+// Streams responseData one character at a time as chat.completion.chunk objects via callback.
+// Reasoning chunks go into delta.reasoning_content and never set finish_reason: 'stop';
+// only the final content chunk does, so clients see a single terminating chunk.
+async function mockStreamData(responseData, { model, isReasoningContent = false } = {}, callback = () => {}) {
+  const responseParts = responseData.split('')
+  for (let i = 0; i < responseParts.length; i++) {
+    const part = responseParts[i]
+    const data = {
+      id: 'mock-' + Date.now(),
+      object: 'chat.completion.chunk',
+      created: Math.floor(Date.now() / 1000),
+      model: model || 'gpt-3.5-turbo',
+      choices: [
+        {
+          index: 0,
+          delta: {
+            [isReasoningContent ? 'reasoning_content' : 'content']: part,
+          },
+          finish_reason: i === responseParts.length - 1 && !isReasoningContent ? 'stop' : null,
+        },
+      ],
+    }
+
+    callback(data)
+    // Reasoning text streams slightly faster than the visible answer.
+    await delay(...(isReasoningContent ? [30, 70] : [50, 150]))
+  }
+}
+
 function generateStreamResponse(body) {
   const mockData = getMockData(body.messages)
-  const responseParts = mockData.split('')
+
+  // `model` may be missing on the request body (the chunk builder defaults it too),
+  // so guard with '' before calling .includes to avoid a TypeError.
+  const isReasonModel = (body.model || '').includes('reason')
 
   const stream = new ReadableStream({
     async start(controller) {
       controller.enqueue(encoder.encode('data: {}\n\n'))
-      await new Promise((resolve) => setTimeout(resolve, 300))
-
-      for (let i = 0; i < responseParts.length; i++) {
-        const part = responseParts[i]
-        const data = {
-          id: 'mock-' + Date.now(),
-          object: 'chat.completion.chunk',
-          created: Math.floor(Date.now() / 1000),
-          model: body.model || 'gpt-3.5-turbo',
-          choices: [
-            {
-              index: 0,
-              delta: {
-                content: part,
-              },
-              finish_reason: i === responseParts.length - 1 ? 'stop' : null,
-            },
-          ],
-        }
+      await delay(100)
+      if (isReasonModel) {
+        await mockStreamData('深度思考:\n' + mockData, { model: body.model, isReasoningContent: true }, (data) => {
+          controller.enqueue(encoder.encode(`data: ${JSON.stringify(data)}\n\n`))
+        })
+      }
 
+      await mockStreamData(mockData, { model: body.model }, (data) => {
         controller.enqueue(encoder.encode(`data: ${JSON.stringify(data)}\n\n`))
+      })
 
-        await new Promise((resolve) => setTimeout(resolve, Math.random() * 100 + 50))
-      }
       controller.enqueue(encoder.encode('data: [DONE]\n\n'))
       controller.close()
diff --git a/packages/kit/src/types.ts b/packages/kit/src/types.ts
index b20670059..592a3e864 100644
--- a/packages/kit/src/types.ts
+++ b/packages/kit/src/types.ts
@@ -8,7 +8,8 @@ export type MessageRole = 'system' | 'user' | 'assistant'
  */
 export interface ChatMessage {
   role: MessageRole
-  content: string
+  content: string | null
+  reasoning_content?: string | null
   name?: string
 }
 
@@ -82,6 +83,7 @@ export interface ChatCompletionResponse {
 export interface ChatCompletionStreamResponseDelta {
   content?: string
   role?: MessageRole
+  reasoning_content?: string | null
 }
 
 /**
diff --git a/packages/kit/src/vue/message/useMessage.ts b/packages/kit/src/vue/message/useMessage.ts
index a286f425f..4efa4be29 100644
--- a/packages/kit/src/vue/message/useMessage.ts
+++ b/packages/kit/src/vue/message/useMessage.ts
@@ -126,6 +126,12 @@ export function useMessage(options: UseMessageOptions): UseMessageReturn {
       messages.value.push({ role: 'assistant', content: '' })
     }
     const choice = data.choices?.[0]
+    if (choice && choice.delta.reasoning_content) {
+      if (!messages.value[messages.value.length - 1].reasoning_content) {
+        messages.value[messages.value.length - 1].reasoning_content = ''
+      }
+      messages.value[messages.value.length - 1].reasoning_content += choice.delta.reasoning_content
+    }
     if (choice && choice.delta.content) {
       messages.value[messages.value.length - 1].content += choice.delta.content
     }