
Commit 5088a04

Author: samcraftt
Message: first rag chatbot commit
Parent: 3bc934e

File tree

28 files changed: +7232 −2321 lines

client/package-lock.json

Lines changed: 5459 additions & 2310 deletions
Generated file; diff not rendered by default.

client/package.json

Lines changed: 1 addition & 0 deletions
@@ -19,6 +19,7 @@
     "path": "^0.12.7",
     "react": "^18.3.1",
     "react-dom": "^18.3.1",
+    "react-markdown": "^10.1.0",
     "react-router-dom": "^6.23.1",
     "react-scripts": "^5.0.1",
     "utif": "^3.1.0",

client/src/components/Chatbot.js

Lines changed: 182 additions & 0 deletions
@@ -0,0 +1,182 @@
+import React, { useEffect, useState, useRef } from 'react'
+import { Button, Input, List, Typography, Space, Spin, Popconfirm } from 'antd'
+import { SendOutlined, CloseOutlined, DeleteOutlined } from '@ant-design/icons'
+import { queryChatBot, clearChat } from '../utils/api'
+import ReactMarkdown from 'react-markdown'
+
+const { TextArea } = Input
+const { Text } = Typography
+const initialMessage = [{ id: 1, text: "Hello! I'm your AI assistant. How can I help you today?", isUser: false }]
+
+function Chatbot({ onClose }) {
+  const [messages, setMessages] = useState(() => {
+    const saved = localStorage.getItem('chatMessages')
+    return saved ? JSON.parse(saved) : initialMessage
+  })
+  const [inputValue, setInputValue] = useState('')
+  const [isSending, setIsSending] = useState(false)
+  const lastMessageRef = useRef(null)
+
+  const scrollToLastMessage = () => {
+    setTimeout(() => {
+      if (lastMessageRef.current) {
+        lastMessageRef.current.scrollIntoView({ behavior: 'smooth', block: 'start' })
+      }
+    }, 0)
+  }
+
+  useEffect(() => {
+    localStorage.setItem('chatMessages', JSON.stringify(messages))
+  }, [messages])
+
+  useEffect(() => {
+    scrollToLastMessage()
+  }, [messages, isSending])
+
+  const handleSendMessage = async () => {
+    if (!inputValue.trim() || isSending) return
+    const query = inputValue
+    setInputValue('')
+    const userMessage = { id: messages.length + 1, text: query, isUser: true }
+    setMessages(prev => [...prev, userMessage])
+    setIsSending(true)
+    try {
+      const responseText = await queryChatBot(query)
+      const botMessage = { id: userMessage.id + 1, text: responseText || 'Sorry, I could not generate a response.', isUser: false }
+      setMessages(prev => [...prev, botMessage])
+    } catch (e) {
+      setMessages(prev => [...prev, { id: prev.length + 1, text: 'Error contacting chatbot.', isUser: false }])
+    } finally {
+      setIsSending(false)
+    }
+  }
+
+  const handleKeyPress = (e) => {
+    if (e.key === 'Enter' && !e.shiftKey) {
+      e.preventDefault()
+      handleSendMessage()
+    }
+  }
+
+  const handleClearChat = async () => {
+    try {
+      await clearChat()
+      setMessages(initialMessage)
+      localStorage.setItem('chatMessages', JSON.stringify(initialMessage))
+    } catch (e) {
+      console.error('Failed to clear chat:', e)
+    }
+  }
+
+  return (
+    <div
+      style={{
+        height: '100vh',
+        display: 'flex',
+        flexDirection: 'column'
+      }}
+    >
+      <div
+        style={{
+          padding: '16px',
+          display: 'flex',
+          justifyContent: 'space-between',
+          alignItems: 'center',
+        }}
+      >
+        <Text strong>AI Assistant</Text>
+        <Space>
+          <Popconfirm
+            title="Clear chat history"
+            onConfirm={handleClearChat}
+            okText="Clear"
+            cancelText="Cancel"
+          >
+            <Button
+              type="text"
+              icon={<DeleteOutlined />}
+              size="small"
+            />
+          </Popconfirm>
+          <Button
+            type="text"
+            icon={<CloseOutlined />}
+            onClick={onClose}
+            size="small"
+          />
+        </Space>
+      </div>
+      <div
+        style={{
+          flex: 1,
+          overflow: 'auto',
+          padding: '16px',
+        }}
+      >
+        <List
+          dataSource={messages}
+          renderItem={(message, index) => {
+            const isLastMessage = index === messages.length - 1
+            return (
+              <List.Item
+                ref={isLastMessage ? lastMessageRef : null}
+                style={{
+                  border: 'none',
+                  padding: '8px 0',
+                  justifyContent: message.isUser ? 'flex-end' : 'flex-start'
+                }}
+              >
+                <div
+                  style={{
+                    maxWidth: '80%',
+                    padding: '8px 12px',
+                    borderRadius: '12px',
+                    backgroundColor: message.isUser ? '#1890ff' : '#f5f5f5',
+                    color: message.isUser ? 'white' : 'black',
+                  }}
+                >
+                  {message.isUser ? (
+                    <Text style={{ color: 'white' }}>
+                      {message.text}
+                    </Text>
+                  ) : (
+                    <ReactMarkdown
+                      components={{
+                        ul: ({ children }) => <ul style={{ paddingLeft: '20px' }}>{children}</ul>,
+                        ol: ({ children }) => <ol style={{ paddingLeft: '20px' }}>{children}</ol>
+                      }}
+                    >
+                      {message.text}
+                    </ReactMarkdown>
+                  )}
+                </div>
+              </List.Item>
+            )
+          }}
+        />
+        {isSending && (
+          <Spin size="small" />
+        )}
+      </div>
+      <div style={{ padding: '16px' }}>
+        <Space.Compact style={{ width: '100%' }}>
+          <TextArea
+            value={inputValue}
+            onChange={(e) => setInputValue(e.target.value)}
+            onKeyPress={handleKeyPress}
+            placeholder="Type your message..."
+            autoSize={{ minRows: 1, maxRows: 3 }}
+          />
+          <Button
+            icon={<SendOutlined />}
+            onClick={handleSendMessage}
+            disabled={!inputValue.trim() || isSending}
+          />
+        </Space.Compact>
+      </div>
+    </div>
+  )
+}
+
+export default Chatbot

client/src/utils/api.js

Lines changed: 22 additions & 0 deletions
@@ -170,3 +170,25 @@ export async function stopModelInference () {
     handleError(error)
   }
 }
+
+export async function queryChatBot (query) {
+  try {
+    const res = await axios.post(
+      `${process.env.REACT_APP_API_PROTOCOL}://${process.env.REACT_APP_API_URL}/chat/query`,
+      { query }
+    )
+    return res.data?.response
+  } catch (error) {
+    handleError(error)
+  }
+}
+
+export async function clearChat () {
+  try {
+    await axios.post(
+      `${process.env.REACT_APP_API_PROTOCOL}://${process.env.REACT_APP_API_URL}/chat/clear`
+    )
+  } catch (error) {
+    handleError(error)
+  }
+}

client/src/views/Views.js

Lines changed: 22 additions & 1 deletion
@@ -4,7 +4,9 @@ import Visualization from '../views/Visualization'
 import ModelTraining from '../views/ModelTraining'
 import ModelInference from '../views/ModelInference'
 import Monitoring from '../views/Monitoring'
-import { Layout, Menu } from 'antd'
+import Chatbot from '../components/Chatbot'
+import { Layout, Menu, Button } from 'antd'
+import { MessageOutlined } from '@ant-design/icons'
 import { getNeuroglancerViewer } from '../utils/api'

 const { Content, Sider } = Layout

@@ -14,6 +16,7 @@ function Views () {
   const [viewers, setViewers] = useState([])
   const [isLoading, setIsLoading] = useState(false)
   const [isInferring, setIsInferring] = useState(false)
+  const [isChatOpen, setIsChatOpen] = useState(false)
   console.log(viewers)

   const onClick = (e) => {

@@ -122,6 +125,24 @@ function Views () {
           {renderMenu()}
         </Content>
       </Layout>
+      {isChatOpen ? (
+        <Sider
+          width={400}
+          theme='light'
+        >
+          <Chatbot onClose={() => setIsChatOpen(false)} />
+        </Sider>
+      ) : (
+        <Button
+          type="primary"
+          shape="circle"
+          icon={<MessageOutlined />}
+          onClick={() => setIsChatOpen(true)}
+          style={{
+            margin: '8px 8px'
+          }}
+        />
+      )}
     </>
   )}
 </Layout>

server_api/chatbot/chatbot.py

Lines changed: 42 additions & 0 deletions
@@ -0,0 +1,42 @@
+from langchain_ollama import OllamaEmbeddings, ChatOllama
+from langchain_community.vectorstores import FAISS
+from langchain_classic.chains import ConversationalRetrievalChain
+from langchain_classic.memory import ConversationBufferMemory
+from langchain_classic.prompts import ChatPromptTemplate
+from utils.utils import process_path
+
+embeddings = OllamaEmbeddings(model='mistral:latest', base_url='http://cscigpu08.bc.edu:11434')
+faiss_path = process_path('server_api/chatbot/faiss_index')
+vectorstore = FAISS.load_local(faiss_path, embeddings, allow_dangerous_deserialization=True)
+retriever = vectorstore.as_retriever()
+system_prompt = '''
+You are a helpful AI assistant for the PyTorch Connectomics client, designed to help non-technical users navigate and use the application effectively.
+IMPORTANT GUIDELINES:
+- You are helping end-users who have no programming knowledge
+- Focus on what users can see and do in the interface, not technical implementation details
+- Provide concise, step-by-step instructions for using the platform
+- Explain features in terms of user actions (clicking buttons, navigating menus, etc.)
+- Avoid technical jargon, API endpoints, or code-related explanations
+EXAMPLES OF GOOD vs BAD RESPONSES:
+BAD: "You need to set the isTraining boolean to true and call the start_model_training endpoint"
+GOOD: "To start training a model, go to the 'Model Training' tab, configure your training parameters using the step-by-step wizard, then click the 'Start Training' button"
+BAD: "Access the /neuroglancer endpoint with image and label paths"
+GOOD: "To visualize your data, first upload your image and label files using the drag-and-drop area, then select them from the dropdown menus, enter the voxel scales, and click 'Visualize'"
+BAD: "The trainingStatus state variable tracks the current training progress"
+GOOD: "You can monitor your training progress by checking the status message below the training buttons, or by going to the 'Tensorboard' tab to see detailed metrics"
+Remember: Help users navigate the no-code interface, not understand the underlying technical architecture.
+Here is the related content that will help you answer the user's question:
+{context}
+'''
+prompt = ChatPromptTemplate.from_messages([
+    ('system', system_prompt),
+    ('human', '{question}')
+])
+llm = ChatOllama(model='mistral:latest', base_url='http://cscigpu08.bc.edu:11434', temperature=0)
+memory = ConversationBufferMemory(return_messages=True, memory_key="chat_history")
+chain = ConversationalRetrievalChain.from_llm(
+    llm=llm,
+    retriever=retriever,
+    memory=memory,
+    combine_docs_chain_kwargs={"prompt": prompt}
+)
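
The route handlers that expose this chain to the client (the /chat/query and /chat/clear endpoints called from client/src/utils/api.js) are part of the commit but not shown in this excerpt. A minimal sketch of what such a module might look like, assuming a Flask blueprint and reusing the chain and memory objects defined above; the module path and function names are illustrative, only the endpoint paths and the response key come from the client code:

# Hypothetical route module; the framework (Flask) and import path are assumptions.
from flask import Blueprint, jsonify, request

from chatbot.chatbot import chain, memory  # objects built in chatbot.py above

chat_bp = Blueprint('chat', __name__, url_prefix='/chat')

@chat_bp.route('/query', methods=['POST'])
def query_chatbot():
    query = request.get_json().get('query', '')
    # ConversationalRetrievalChain returns its reply under the 'answer' key;
    # the client reads res.data.response, so re-key it here.
    result = chain.invoke({'question': query})
    return jsonify({'response': result['answer']})

@chat_bp.route('/clear', methods=['POST'])
def clear_chat():
    # Wipe the conversation buffer so the next question starts a fresh session.
    memory.clear()
    return jsonify({'status': 'cleared'})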
Two binary files changed (288 KB and 60.2 KB): contents not shown.
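
These binaries are most likely the serialized FAISS index pair (index.faiss / index.pkl) that chatbot.py loads from server_api/chatbot/faiss_index. The commit does not show the script that built them; a minimal build sketch under that assumption, with the source document path and chunking parameters purely illustrative:

# Hypothetical one-off index build; the real corpus and settings are not part of this commit.
from langchain_community.document_loaders import TextLoader
from langchain_community.vectorstores import FAISS
from langchain_ollama import OllamaEmbeddings
from langchain_text_splitters import RecursiveCharacterTextSplitter

docs = TextLoader('docs/user_guide.md').load()  # placeholder corpus
chunks = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100).split_documents(docs)
embeddings = OllamaEmbeddings(model='mistral:latest', base_url='http://cscigpu08.bc.edu:11434')
# save_local writes the index.faiss / index.pkl pair that FAISS.load_local reads back.
FAISS.from_documents(chunks, embeddings).save_local('server_api/chatbot/faiss_index')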

0 commit comments
