55 | 55 | @item-click="handlePromptItemClick" |
56 | 56 | ></tr-prompts> |
57 | 57 | </div> |
58 | | - <tr-bubble-provider :content-renderers="contentRenderers" v-else> |
| 58 | + <tr-bubble-provider |
| 59 | + :content-renderers="aiType === AI_MODES['Chat'] ? contentRenderers : buildContentRenderers" |
| 60 | + v-else |
| 61 | + > |
59 | 62 | <tr-bubble-list :items="activeMessages" :roles="roles" autoScroll></tr-bubble-list> |
60 | 63 | </tr-bubble-provider> |
61 | 64 | </div> |
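Note on the template change above (a side observation, not part of this diff): the inline ternary could move into a computed so the template binds a single value. A minimal sketch, assuming `aiType` is a ref and both renderer maps are in scope in setup; `activeContentRenderers` is a hypothetical name:

```ts
import { computed, type Component } from 'vue'

// Hypothetical refactor: resolve the renderer map once in setup so the template
// can bind :content-renderers="activeContentRenderers" instead of an inline ternary.
const activeContentRenderers = computed<Record<string, Component>>(() =>
  aiType.value === AI_MODES['Chat'] ? contentRenderers : buildContentRenderers
)
```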
@@ -144,11 +147,12 @@ import { utils } from '@opentiny/tiny-engine-utils' |
144 | 147 | import RobotSettingPopover from './RobotSettingPopover.vue' |
145 | 148 | import { PROMPTS } from './js/prompts' |
146 | 149 | import * as jsonpatch from 'fast-json-patch' |
147 | | -import { chatStream, checkComponentNameExists } from './js/utils' |
| 150 | +import { checkComponentNameExists, processSSEStream } from './js/utils' |
148 | 151 | import McpServer from './mcp/McpServer.vue' |
149 | 152 | import useMcpServer from './mcp/useMcp' |
150 | 153 | import MarkdownRenderer from './mcp/MarkdownRenderer.vue' |
151 | 154 | import LoadingRenderer from './mcp/LoadingRenderer.vue' |
| 155 | +import BuildLoadingRenderer from './BuildLoadingRenderer.vue' |
152 | 156 | import { sendMcpRequest, serializeError } from './mcp/utils' |
153 | 157 | import type { RobotMessage } from './mcp/types' |
154 | 158 | import RobotTypeSelect from './RobotTypeSelect.vue' |
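Context for the `fast-json-patch` import and the `jsonrepair` call used further down: the stream carries a JSON array of RFC 6902 operations, so each throttled pass repairs the partial text and re-applies the operations to the original schema. A small standalone sketch of that combination, with illustrative data only:

```ts
import { jsonrepair } from 'jsonrepair'
import * as jsonpatch from 'fast-json-patch'

// Illustrative only: a truncated stream of RFC 6902 operations, cut off mid-array.
const partialStream = '[{"op":"replace","path":"/componentName","value":"Page"},'
const baseSchema = { componentName: 'Block', children: [] as unknown[] }

try {
  // jsonrepair strips the dangling comma and closes the array so the partial text parses.
  const patches = JSON.parse(jsonrepair(partialStream))
  // Apply each operation against a copy; validation and mutation are disabled,
  // matching the applyPatch(acc, [patch], false, false) call in the diff.
  const next = patches.reduce(
    (acc: any, patch: any) => jsonpatch.applyPatch(acc, [patch], false, false).newDocument,
    baseSchema
  )
  console.log(next.componentName) // 'Page'
} catch {
  // Mid-stream text may still not be repairable; wait for the next chunk.
}
```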
@@ -395,73 +399,85 @@ export default { |
395 | 399 | } |
396 | 400 |
|
397 | 401 | let streamContent = '' |
| 402 | + let lastResponseLength = 0 |
398 | 403 | const chatId = Date.now().toString() |
399 | 404 | const currentJson = deepClone(pageState.pageSchema) |
400 | 405 | let lastExecutionTime = 0 |
401 | 406 | const throttleDelay = 3000 |
402 | | - await chatStream( |
403 | | - { |
404 | | - requestUrl: '/app-center/api/ai/chat', |
405 | | - requestData: params |
406 | | - }, |
407 | | - { |
408 | | - onData: (data) => { |
409 | | - const choice = data.choices?.[0] |
410 | | - if (choice && choice.delta.content) { |
411 | | - if (messages.value.length === 0 || messages.value[messages.value.length - 1].role !== 'assistant') { |
412 | | - messages.value.push(getAiDisplayMessage('', 'assistant', {}, chatId)) |
413 | | - } |
414 | | - if (streamContent !== messages.value[messages.value.length - 1].content) { |
415 | | - messages.value[messages.value.length - 1].content = '' |
416 | | - } |
417 | | - streamContent += choice.delta.content |
418 | | - messages.value[messages.value.length - 1].content += choice.delta.content |
419 | | - const currentTime = Date.now() |
420 | | - if (currentTime - lastExecutionTime > throttleDelay) { |
421 | | - try { |
422 | | - const repaired = jsonrepair(streamContent) |
423 | | - const parsedJson = JSON.parse(repaired) |
424 | | - const result = parsedJson.reduce((acc, patch) => { |
425 | | - return jsonpatch.applyPatch(acc, [patch], false, false).newDocument |
426 | | - }, currentJson) |
427 | | - const editorValue = string2Obj(obj2String(result)) |
428 | | -
|
429 | | - if (editorValue && checkComponentNameExists(result)) { |
430 | | - setSchema(result) |
431 | | - } |
432 | | - } catch (error) { |
433 | | - // error |
434 | | - } |
435 | | - lastExecutionTime = currentTime |
436 | | - } |
437 | | - } |
438 | | - }, |
439 | | - onError: (error) => { |
440 | | - messages.value[messages.value.length - 1].content = '连接失败' |
441 | | - localStorage.removeItem('aiChat') |
442 | | - inProcesing.value = false |
443 | | - connectedFailed.value = false |
444 | | - // eslint-disable-next-line no-console |
445 | | - console.error('Stream error:', error) |
| 407 | + requestLoading.value = true |
| 408 | + getMetaApi(META_SERVICE.Http) |
| 409 | + .stream({ |
| 410 | + method: 'POST', |
| 411 | + url: '/app-center/api/ai/chat', |
| 412 | + data: params, |
| 413 | + headers: { |
| 414 | + 'Content-Type': 'application/json', |
| 415 | + Accept: 'text/event-stream', |
| 416 | + Authorization: `Bearer ${robotSettingState.selectedModel.apiKey || import.meta.env.VITE_API_TOKEN}` |
446 | 417 | }, |
447 | | - onDone: () => { |
448 | | - handleResponse( |
449 | | - { |
450 | | - id: chatId, |
451 | | - chatMessage: { |
452 | | - role: 'assistant', |
453 | | - content: streamContent || '没有返回内容', |
454 | | - name: 'AI' |
| 418 | + onDownloadProgress: (progressEvent) => { |
| 419 | + const currentResponse = progressEvent.currentTarget.responseText |
| 420 | + const newData = currentResponse.substring(lastResponseLength) |
| 421 | + lastResponseLength = currentResponse.length |
| 422 | + processSSEStream(newData, { |
| 423 | + onData: (data) => { |
| 424 | + const choice = data.choices?.[0] |
| 425 | + if (choice && choice.delta.content) { |
| 426 | + if (messages.value.length === 0 || messages.value[messages.value.length - 1].role !== 'assistant') { |
| 427 | + messages.value.push(getAiDisplayMessage('', 'assistant', {}, chatId)) |
| 428 | + } |
| 429 | + if (streamContent !== messages.value[messages.value.length - 1].content) { |
| 430 | + messages.value[messages.value.length - 1].content = '' |
| 431 | + } |
| 432 | + streamContent += choice.delta.content |
| 433 | + messages.value.at(-1)!.renderContent = [{ type: 'loading', content: streamContent }] |
| 434 | + const currentTime = Date.now() |
| 435 | + if (currentTime - lastExecutionTime > throttleDelay) { |
| 436 | + try { |
| 437 | + const repaired = jsonrepair(streamContent) |
| 438 | + const parsedJson = JSON.parse(repaired) |
| 439 | + const result = parsedJson.reduce((acc, patch) => { |
| 440 | + return jsonpatch.applyPatch(acc, [patch], false, false).newDocument |
| 441 | + }, currentJson) |
| 442 | + const editorValue = string2Obj(obj2String(result)) |
| 443 | +
|
| 444 | + if (editorValue && checkComponentNameExists(result)) { |
| 445 | + setSchema(result) |
| 446 | + } |
| 447 | + } catch (error) { |
| 449 | +              // Ignore parse errors: the streamed patch JSON may be incomplete until more chunks arrive |
| 449 | + } |
| 450 | + lastExecutionTime = currentTime |
| 451 | + } |
455 | 452 | } |
456 | 453 | }, |
457 | | - currentJson |
458 | | - ) |
| 454 | + onDone: () => { |
| 455 | + requestLoading.value = false |
| 456 | + delete messages.value.at(-1)!.renderContent |
| 457 | + handleResponse( |
| 458 | + { |
| 459 | + id: chatId, |
| 460 | + chatMessage: { |
| 461 | + role: 'assistant', |
| 462 | + content: streamContent || '没有返回内容', |
| 463 | + name: 'AI' |
| 464 | + } |
| 465 | + }, |
| 466 | + currentJson |
| 467 | + ) |
| 468 | + } |
| 469 | + }) |
459 | 470 | } |
460 | | - }, |
461 | | - { |
462 | | - Authorization: `Bearer ${robotSettingState.selectedModel.apiKey || import.meta.env.VITE_API_TOKEN}` |
463 | | - } |
464 | | - ) |
| 471 | + }) |
| 472 | + .catch((error) => { |
| 473 | + messages.value[messages.value.length - 1].content = '连接失败' |
| 474 | + localStorage.removeItem('aiChat') |
| 475 | + requestLoading.value = false |
| 476 | + inProcesing.value = false |
| 477 | + connectedFailed.value = false |
| 478 | + // eslint-disable-next-line no-console |
| 479 | + console.error('Stream error:', error) |
| 480 | + }) |
465 | 481 | } |
466 | 482 | } |
467 | 483 |
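Note: `processSSEStream` is imported from `./js/utils` but not shown in this diff. A minimal sketch of its assumed shape — splitting each incremental chunk into SSE `data:` lines, JSON-parsing each payload, and treating `[DONE]` as the end-of-stream marker — is below; the real helper may differ:

```ts
// Hypothetical sketch of the SSE chunk parser assumed by the onDownloadProgress handler above;
// the actual implementation in ./js/utils may differ.
interface SSEHandlers {
  onData: (data: any) => void
  onDone: () => void
}

export function processSSEStream(chunk: string, { onData, onDone }: SSEHandlers): void {
  for (const line of chunk.split('\n')) {
    const trimmed = line.trim()
    if (!trimmed.startsWith('data:')) continue
    const payload = trimmed.slice('data:'.length).trim()
    if (payload === '[DONE]') {
      onDone()
      continue
    }
    try {
      onData(JSON.parse(payload))
    } catch {
      // Ignore fragments that span chunk boundaries and are not yet valid JSON.
    }
  }
}
```

Because `onDownloadProgress` re-reads the full `responseText` and slices off what was already handled via `lastResponseLength`, events can arrive split across calls, so a parser along these lines has to tolerate partial payloads.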
|
@@ -785,6 +801,10 @@ export default { |
785 | 801 | loading: LoadingRenderer |
786 | 802 | } |
787 | 803 |
|
| 804 | + const buildContentRenderers: Record<string, Component> = { |
| 805 | + loading: BuildLoadingRenderer |
| 806 | + } |
| 807 | +
|
788 | 808 | const mcpDrawerPosition = computed(() => { |
789 | 809 | return { |
790 | 810 | type: 'fixed', |
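On the new `buildContentRenderers` map: during streaming the assistant message is tagged with `renderContent: [{ type: 'loading', content }]`, and each item's `type` is presumably looked up in whichever map the provider received, so build mode renders the in-progress content with `BuildLoadingRenderer` instead of `LoadingRenderer`. A hedged sketch of that pairing; the actual lookup lives inside `tr-bubble-provider` and may work differently:

```ts
import type { Component } from 'vue'

// Assumed shape of a renderable chunk on a RobotMessage (hypothetical type name).
interface RenderContentItem {
  type: string
  content: unknown
}

// Hypothetical resolution step: pick a component for each chunk, or fall back
// to plain content rendering when the type has no registered renderer.
function resolveRenderer(item: RenderContentItem, renderers: Record<string, Component>): Component | null {
  return renderers[item.type] ?? null
}
// In build mode the provider gets { loading: BuildLoadingRenderer }, so
// resolveRenderer({ type: 'loading', content }, buildContentRenderers) yields BuildLoadingRenderer.
```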
@@ -841,7 +861,8 @@ export default { |
841 | 861 | typeChange, |
842 | 862 | isVisualModel, |
843 | 863 | contentRenderers, |
844 | | - mcpDrawerPosition |
| 864 | + mcpDrawerPosition, |
| 865 | + buildContentRenderers |
845 | 866 | } |
846 | 867 | } |
847 | 868 | } |
|