
Commit cc7144e

Merge pull request #190 from CS3219-AY2425S1/feat/collab-service/ai-chat
Add AI Chat
2 parents 3ecd4f3 + 654b91d commit cc7144e

8 files changed, +78 -4 lines changed


.env.sample

Lines changed: 1 addition & 0 deletions
@@ -25,6 +25,7 @@ MATCHING_SVC_PORT=6969
 ## Collab service variables
 COLLAB_SVC_PORT=3002
 COLLAB_SVC_DB_URI=
+OPENAI_API_KEY=

 ## Redis variables
 REDIS_PORT=6379
collab-service/app/controller/ai-controller.js

Lines changed: 19 additions & 0 deletions
@@ -0,0 +1,19 @@
+import { sendAiMessage } from "../model/repository.js";
+
+// send ai message
+export async function sendAiMessageController(req, res) {
+  const { message } = req.body;
+  if (!message) {
+    return res.status(400).json({ error: "Message content is required" });
+  }
+
+  const data = await sendAiMessage(message);
+  const aiResponse =
+    data.choices?.[0]?.message?.content || "No response from AI";
+
+  if (aiResponse) {
+    res.status(200).json({ data });
+  } else {
+    res.status(500).json({ error: "Failed to retrieve AI response" });
+  }
+}
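For reference, a successful response from this controller wraps the raw Chat Completions payload under a data key. The sketch below is illustrative only (field values are placeholders, not taken from a real API call) and shows the shape a caller can expect:

// Illustrative response body; real values come from the OpenAI API.
const exampleResponseBody = {
  data: {
    id: "chatcmpl-abc123", // placeholder id
    object: "chat.completion",
    model: "gpt-3.5-turbo",
    choices: [
      {
        index: 0,
        message: { role: "assistant", content: "Hello! How can I help?" },
        finish_reason: "stop",
      },
    ],
  },
};

// The reply text a client reads out of the wrapper:
const reply = exampleResponseBody.data.choices?.[0]?.message?.content;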

collab-service/app/model/repository.js

Lines changed: 20 additions & 0 deletions
@@ -120,3 +120,23 @@ export async function getQuestionIdByRoomId(roomId) {
     return null;
   }
 }
+
+export async function sendAiMessage(message) {
+  try {
+    const response = await fetch("https://api.openai.com/v1/chat/completions", {
+      method: "POST",
+      headers: {
+        'Content-Type': "application/json",
+        'Authorization': `Bearer ${process.env.OPENAI_API_KEY}`,
+      },
+      body: JSON.stringify({
+        model: "gpt-3.5-turbo",
+        messages: [{ role: "user", content: message }],
+      }),
+    });
+    const data = await response.json();
+    return data;
+  } catch (error) {
+    console.error("Error in sending AI message:", error);
+  }
+}
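A minimal usage sketch for sendAiMessage, assuming OPENAI_API_KEY is set in the environment and the call is made from an ES module inside collab-service; the prompt string and import path are illustrative, not part of the commit:

import { sendAiMessage } from "./app/model/repository.js";

// Hypothetical prompt; any string works.
const data = await sendAiMessage("Explain the two-pointer technique in one paragraph.");

// The helper returns the parsed Chat Completions payload,
// or undefined if the fetch threw and was caught inside the function.
const reply = data?.choices?.[0]?.message?.content;
console.log(reply ?? "No response from AI");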

collab-service/app/routes/collab-routes.js

Lines changed: 3 additions & 0 deletions
@@ -7,6 +7,7 @@ import {
   getRoomChatHistory,
   getQuestionId,
 } from "../controller/collab-controller.js";
+import { sendAiMessageController } from "../controller/ai-controller.js";

 const router = express.Router();

@@ -22,4 +23,6 @@ router.get("/chat-history/:roomId", getRoomChatHistory);

 router.get("/rooms/:roomId/questionId", getQuestionId);

+router.post("/send-ai-message", sendAiMessageController)
+
 export default router;
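With the route registered, the endpoint can be exercised directly. A hedged sketch, assuming the collab service listens on port 3002 (the COLLAB_SVC_PORT default in .env.sample) and that this router is mounted under a /collab prefix, as the frontend helper's URL suggests:

// Assumed base URL; adjust host, port, and prefix to the actual deployment.
const res = await fetch("http://localhost:3002/collab/send-ai-message", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ message: "What is a hash map?" }),
});

const body = await res.json();
// The controller returns { data: <Chat Completions payload> } on success.
console.log(body.data?.choices?.[0]?.message?.content);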

docker-compose.yml

Lines changed: 2 additions & 1 deletion
@@ -79,4 +79,5 @@ services:
       - $COLLAB_SVC_PORT:$COLLAB_SVC_PORT
     environment:
       - PORT=$COLLAB_SVC_PORT
-      - DB_URI=$COLLAB_SVC_DB_URI
+      - DB_URI=$COLLAB_SVC_DB_URI
+      - OPENAI_API_KEY=$OPENAI_API_KEY
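This entry forwards OPENAI_API_KEY from the .env file into the collab-service container. A quick, illustrative way to confirm the key reached the Node process without echoing the secret (not part of the commit):

// Run inside the collab-service container, e.g. with `node -e "<this line>"`.
console.log("OPENAI_API_KEY set:", Boolean(process.env.OPENAI_API_KEY));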

frontend/Dockerfile

Lines changed: 2 additions & 2 deletions
@@ -13,8 +13,8 @@ ENV NEXT_PUBLIC_BASE_URI=$BASE_URI \
     NEXT_PUBLIC_USER_SVC_PORT=$USER_SVC_PORT \
     NEXT_PUBLIC_QUESTION_SVC_PORT=$QUESTION_SVC_PORT \
     NEXT_PUBLIC_MATCHING_SVC_PORT=$MATCHING_SVC_PORT \
-    NEXT_PUBLIC_COLLAB_SVC_PORT=$COLLAB_SVC_PORT
-
+    NEXT_PUBLIC_COLLAB_SVC_PORT=$COLLAB_SVC_PORT
+
 # Production build stage
 FROM base AS build
 COPY . .

frontend/components/collab/chat.tsx

Lines changed: 14 additions & 1 deletion
@@ -10,6 +10,7 @@ import { Send } from "lucide-react";
 import { io, Socket } from "socket.io-client";
 import { useAuth } from "@/app/auth/auth-context";
 import LoadingScreen from "@/components/common/loading-screen";
+import { sendAiMessage } from "@/lib/api/openai/send-ai-message";
 import { getChatHistory } from "@/lib/api/collab-service/get-chat-history";

 interface Message {
@@ -134,7 +135,7 @@ export default function Chat({ roomId }: { roomId: string }) {
     scrollWithDelay();
   }, [partnerMessages, aiMessages, chatTarget]);

-  const sendMessage = () => {
+  const sendMessage = async () => {
     if (!newMessage.trim() || !socket || !isConnected || !own_user_id) return;

     if (chatTarget === "partner") {
@@ -151,6 +152,18 @@ export default function Chat({ roomId }: { roomId: string }) {
         timestamp: new Date(),
       };
       setAiMessages((prev) => [...prev, message]);
+      const response = await sendAiMessage(newMessage);
+      const data = await response.json();
+      const aiMessage = {
+        id: crypto.randomUUID(),
+        userId: "ai",
+        text:
+          data.data.choices && data.data.choices[0]?.message?.content
+            ? data.data.choices[0].message.content
+            : "An error occurred. Please try again.",
+        timestamp: new Date(),
+      };
+      setAiMessages((prev) => [...prev, aiMessage]);
     }

     setNewMessage("");
frontend/lib/api/openai/send-ai-message.ts

Lines changed: 17 additions & 0 deletions
@@ -0,0 +1,17 @@
+import { collabServiceUri } from "@/lib/api/api-uri";
+
+export const sendAiMessage = async (message: string) => {
+  console.log(`${collabServiceUri(window.location.hostname)}/send-ai-message`);
+  console.log(message);
+  const response = await fetch(
+    `${collabServiceUri(window.location.hostname)}/collab/send-ai-message`,
+    {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json",
+      },
+      body: JSON.stringify({ message: message }),
+    }
+  );
+  return response;
+};
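A minimal caller sketch for this helper, assuming a browser context (it reads window.location.hostname) and adding a response.ok guard that the helper itself does not perform; askAi is a hypothetical name, not part of the commit:

import { sendAiMessage } from "@/lib/api/openai/send-ai-message";

// Hypothetical wrapper a component could call from an event handler.
async function askAi(question) {
  const response = await sendAiMessage(question);
  if (!response.ok) {
    return "An error occurred. Please try again.";
  }
  const body = await response.json();
  // The collab service wraps the Chat Completions payload under `data`.
  return body.data?.choices?.[0]?.message?.content ?? "No response from AI";
}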
