diff --git a/src/Elastic.Documentation.Site/Assets/web-components/SearchOrAskAi/AskAi/Chat.tsx b/src/Elastic.Documentation.Site/Assets/web-components/SearchOrAskAi/AskAi/Chat.tsx
index b110742ec..f46cd55f6 100644
--- a/src/Elastic.Documentation.Site/Assets/web-components/SearchOrAskAi/AskAi/Chat.tsx
+++ b/src/Elastic.Documentation.Site/Assets/web-components/SearchOrAskAi/AskAi/Chat.tsx
@@ -15,7 +15,7 @@ import {
 } from '@elastic/eui'
 import { css } from '@emotion/react'
 import * as React from 'react'
-import { useCallback, useEffect, useRef } from 'react'
+import { useCallback, useEffect, useRef, useState } from 'react'
 
 const containerStyles = css`
 	height: 100%;
@@ -66,6 +66,7 @@ export const Chat = () => {
 	const inputRef = useRef(null)
 	const scrollRef = useRef(null)
 	const lastMessageStatusRef = useRef(null)
+	const [inputValue, setInputValue] = useState('')
 
 	const dynamicScrollableStyles = css`
 		${scrollableStyles}
@@ -81,6 +82,7 @@ export const Chat = () => {
 			if (inputRef.current) {
 				inputRef.current.value = ''
 			}
+			setInputValue('')
 
 			// Scroll to bottom after new message
 			setTimeout(() => scrollToBottom(scrollRef.current), 100)
@@ -202,6 +204,7 @@ export const Chat = () => {
 					inputRef={inputRef}
 					fullWidth
 					placeholder="Ask Elastic Docs AI Assistant"
+					onChange={(e) => setInputValue(e.target.value)}
 					onKeyDown={(e) => {
 						if (e.key === 'Enter') {
 							handleSubmit(e.currentTarget.value)
@@ -219,7 +222,7 @@
 					`}
 					color="primary"
 					iconType="sortUp"
-					display="base"
+					display={inputValue.trim() ? 'fill' : 'base'}
 					onClick={() => {
 						if (inputRef.current) {
 							handleSubmit(inputRef.current.value)
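The Chat.tsx hunks above mirror the text field's value into component state so the send button can toggle its `display` between `'base'` and `'fill'`. Below is a minimal standalone sketch of that pattern; it uses a fully controlled `EuiFieldText` and a hypothetical `handleSubmit`, whereas the real component keeps the uncontrolled `inputRef` (and its actual field component is not shown in these hunks), so treat it as an illustration rather than the Chat.tsx code itself.

```tsx
import {
	EuiButtonIcon,
	EuiFieldText,
	EuiFlexGroup,
	EuiFlexItem,
} from '@elastic/eui'
import * as React from 'react'
import { useState } from 'react'

// Hypothetical stand-in for the real submit handler in Chat.tsx.
const handleSubmit = (value: string) => {
	if (value.trim()) {
		console.log('submit:', value)
	}
}

export const SendButtonSketch = () => {
	// Mirror the input text into state so the button style can react to it.
	const [inputValue, setInputValue] = useState('')

	const submit = () => {
		handleSubmit(inputValue)
		setInputValue('')
	}

	return (
		<EuiFlexGroup gutterSize="s" alignItems="center">
			<EuiFlexItem>
				<EuiFieldText
					fullWidth
					placeholder="Ask Elastic Docs AI Assistant"
					value={inputValue}
					onChange={(e) => setInputValue(e.target.value)}
					onKeyDown={(e) => {
						if (e.key === 'Enter') {
							submit()
						}
					}}
				/>
			</EuiFlexItem>
			<EuiFlexItem grow={false}>
				{/* 'fill' while there is text to send, 'base' otherwise */}
				<EuiButtonIcon
					aria-label="Send"
					iconType="sortUp"
					color="primary"
					display={inputValue.trim() ? 'fill' : 'base'}
					onClick={submit}
				/>
			</EuiFlexItem>
		</EuiFlexGroup>
	)
}
```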
diff --git a/src/Elastic.Documentation.Site/Assets/web-components/SearchOrAskAi/AskAi/ChatMessage.tsx b/src/Elastic.Documentation.Site/Assets/web-components/SearchOrAskAi/AskAi/ChatMessage.tsx
index 856c7addd..0e8bb6f08 100644
--- a/src/Elastic.Documentation.Site/Assets/web-components/SearchOrAskAi/AskAi/ChatMessage.tsx
+++ b/src/Elastic.Documentation.Site/Assets/web-components/SearchOrAskAi/AskAi/ChatMessage.tsx
@@ -1,8 +1,9 @@
 import { initCopyButton } from '../../../copybutton'
+import { GeneratingStatus } from './GeneratingStatus'
+import { References } from './RelatedResources'
 import { ChatMessage as ChatMessageType } from './chat.store'
 import { LlmGatewayMessage } from './useLlmGateway'
 import {
-	EuiAvatar,
 	EuiButtonIcon,
 	EuiCallOut,
 	EuiCopy,
@@ -10,7 +11,6 @@ import {
 	EuiFlexItem,
 	EuiIcon,
 	EuiLoadingElastic,
-	EuiLoadingSpinner,
 	EuiPanel,
 	EuiSpacer,
 	EuiText,
@@ -58,6 +58,30 @@ const getAccumulatedContent = (messages: LlmGatewayMessage[]) => {
 		.join('')
 }
 
+const splitContentAndReferences = (
+	content: string
+): { mainContent: string; referencesJson: string | null } => {
+	const startDelimiter = ''
+	const endDelimiter = ''
+
+	const startIndex = content.indexOf(startDelimiter)
+	if (startIndex === -1) {
+		return { mainContent: content, referencesJson: null }
+	}
+
+	const endIndex = content.indexOf(endDelimiter, startIndex)
+	if (endIndex === -1) {
+		return { mainContent: content, referencesJson: null }
+	}
+
+	const mainContent = content.substring(0, startIndex).trim()
+	const referencesJson = content
+		.substring(startIndex + startDelimiter.length, endIndex)
+		.trim()
+
+	return { mainContent, referencesJson }
+}
+
 const getMessageState = (message: ChatMessageType) => ({
 	isUser: message.type === 'user',
 	isLoading: message.status === 'streaming',
@@ -65,6 +89,59 @@
 	hasError: message.status === 'error',
 })
+
+// Helper functions for computing AI status
+const getToolCallSearchQuery = (
+	messages: LlmGatewayMessage[]
+): string | null => {
+	const toolCallMessage = messages.find((m) => m.type === 'tool_call')
+	if (!toolCallMessage) return null
+
+	try {
+		const toolCalls = toolCallMessage.data?.toolCalls
+		if (toolCalls && toolCalls.length > 0) {
+			const firstToolCall = toolCalls[0]
+			return firstToolCall.args?.searchQuery || null
+		}
+	} catch (e) {
+		console.error('Error extracting search query from tool call:', e)
+	}
+
+	return null
+}
+
+const hasContentStarted = (messages: LlmGatewayMessage[]): boolean => {
+	return messages.some((m) => m.type === 'ai_message_chunk' && m.data.content)
+}
+
+const hasReachedReferences = (messages: LlmGatewayMessage[]): boolean => {
+	const accumulatedContent = messages
+		.filter((m) => m.type === 'ai_message_chunk')
+		.map((m) => m.data.content)
+		.join('')
+	return accumulatedContent.includes('')
+}
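The three helpers added above (`getToolCallSearchQuery`, `hasContentStarted`, `hasReachedReferences`) give the UI enough signal to describe what the assistant is doing while a reply streams. The sketch below shows one way those signals could be combined into a status label; the message shape, the `REFERENCES_MARKER` placeholder, and the status wording are assumptions for illustration, not the actual `GeneratingStatus` implementation.

```ts
// Standalone sketch; the real LlmGatewayMessage type lives in useLlmGateway.ts
// and has more fields than this simplified stand-in.
type GatewayMessage = {
	type: 'tool_call' | 'ai_message_chunk'
	data: { content?: string; toolCalls?: { args?: { searchQuery?: string } }[] }
}

// Placeholder marker, not the real delimiter emitted by the prompt.
const REFERENCES_MARKER = '[REFERENCES]'

const statusFor = (messages: GatewayMessage[]): string => {
	const toolCall = messages.find((m) => m.type === 'tool_call')
	const searchQuery = toolCall?.data.toolCalls?.[0]?.args?.searchQuery ?? null

	const chunks = messages.filter((m) => m.type === 'ai_message_chunk')
	const contentStarted = chunks.some((m) => m.data.content)
	const reachedReferences = chunks
		.map((m) => m.data.content ?? '')
		.join('')
		.includes(REFERENCES_MARKER)

	// Later stages win: references > answer text > search > thinking.
	if (reachedReferences) return 'Gathering related resources'
	if (contentStarted) return 'Writing an answer'
	if (searchQuery) return `Searching the docs for "${searchQuery}"`
	return 'Thinking'
}

// Example: a tool call arrived but no content chunks yet.
console.log(
	statusFor([
		{
			type: 'tool_call',
			data: { toolCalls: [{ args: { searchQuery: 'ilm policy' } }] },
		},
	])
) // Searching the docs for "ilm policy"
```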