Skip to content

Commit 2bb45e6

Browse files
authored
fix: update GraphQL mutation input type, clean up comments, and add interactive chat for running tasks (#174)
<!-- This is an auto-generated comment: release notes by coderabbit.ai --> ## Summary by CodeRabbit - **New Features** - Enhanced chat functionality now supports saving messages with role information and retrieving current project details. - Introduced a new endpoint to fetch a project’s file structure. - Added a utility to convert XML responses into JSON for improved file handling. - New tools for managing tasks within a multi-agent system have been introduced, enhancing task execution capabilities. - Added nullable fields in chat-related data models for improved flexibility. - **Refactor** - Improved error handling and logging across chat operations for a more reliable experience. - Streamlined data handling in various components to enhance performance and maintainability. <!-- end of auto-generated comment: release notes by coderabbit.ai -->
1 parent 90a49af commit 2bb45e6

File tree

20 files changed

+1658
-169
lines changed

20 files changed

+1658
-169
lines changed

backend/src/chat/chat.controller.ts

Lines changed: 2 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -22,13 +22,6 @@ export class ChatController {
2222
@GetAuthToken() userId: string,
2323
) {
2424
try {
25-
// Save user's message first
26-
await this.chatService.saveMessage(
27-
chatDto.chatId,
28-
chatDto.message,
29-
MessageRole.User,
30-
);
31-
3225
if (chatDto.stream) {
3326
// Streaming response
3427
res.setHeader('Content-Type', 'text/event-stream');
@@ -39,6 +32,7 @@ export class ChatController {
3932
chatId: chatDto.chatId,
4033
message: chatDto.message,
4134
model: chatDto.model,
35+
role: MessageRole.User,
4236
});
4337

4438
let fullResponse = '';
@@ -51,13 +45,6 @@ export class ChatController {
5145
}
5246
}
5347

54-
// Save the complete message
55-
await this.chatService.saveMessage(
56-
chatDto.chatId,
57-
fullResponse,
58-
MessageRole.Assistant,
59-
);
60-
6148
res.write('data: [DONE]\n\n');
6249
res.end();
6350
} else {
@@ -66,15 +53,8 @@ export class ChatController {
6653
chatId: chatDto.chatId,
6754
message: chatDto.message,
6855
model: chatDto.model,
56+
role: MessageRole.User,
6957
});
70-
71-
// Save the complete message
72-
await this.chatService.saveMessage(
73-
chatDto.chatId,
74-
response,
75-
MessageRole.Assistant,
76-
);
77-
7858
res.json({ content: response });
7959
}
8060
} catch (error) {

backend/src/chat/chat.model.ts

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -75,11 +75,11 @@ class ChatCompletionDelta {
7575

7676
@ObjectType('ChatCompletionChoiceType')
7777
class ChatCompletionChoice {
78-
@Field()
79-
index: number;
78+
@Field({ nullable: true })
79+
index: number | null;
8080

81-
@Field(() => ChatCompletionDelta)
82-
delta: ChatCompletionDelta;
81+
@Field(() => ChatCompletionDelta, { nullable: true })
82+
delta: ChatCompletionDelta | null;
8383

8484
@Field({ nullable: true })
8585
finishReason: string | null;
@@ -90,14 +90,14 @@ export class ChatCompletionChunk {
9090
@Field()
9191
id: string;
9292

93-
@Field()
94-
object: string;
93+
@Field({ nullable: true })
94+
object: string | null;
9595

96-
@Field()
97-
created: number;
96+
@Field({ nullable: true })
97+
created: number | null;
9898

99-
@Field()
100-
model: string;
99+
@Field({ nullable: true })
100+
model: string | null;
101101

102102
@Field({ nullable: true })
103103
systemFingerprint: string | null;

backend/src/chat/chat.resolver.ts

Lines changed: 58 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
import { Resolver, Subscription, Args, Query, Mutation } from '@nestjs/graphql';
2-
import { Chat, ChatCompletionChunk } from './chat.model';
2+
import { Chat, ChatCompletionChunk, StreamStatus } from './chat.model';
33
import { ChatProxyService, ChatService } from './chat.service';
44
import { UserService } from 'src/user/user.service';
5-
import { Message, MessageRole } from './message.model';
5+
import { Message } from './message.model';
66
import {
77
ChatInput,
88
NewChatInput,
@@ -12,6 +12,7 @@ import { GetUserIdFromToken } from 'src/decorator/get-auth-token.decorator';
1212
import { Inject, Logger } from '@nestjs/common';
1313
import { JWTAuth } from 'src/decorator/jwt-auth.decorator';
1414
import { PubSubEngine } from 'graphql-subscriptions';
15+
import { Project } from 'src/project/project.model';
1516
@Resolver('Chat')
1617
export class ChatResolver {
1718
private readonly logger = new Logger('ChatResolver');
@@ -31,45 +32,65 @@ export class ChatResolver {
3132
resolve: (payload) => payload.chatStream,
3233
})
3334
async chatStream(@Args('input') input: ChatInput) {
34-
return this.pubSub.asyncIterator(`chat_stream_${input.chatId}`);
35+
const asyncIterator = this.pubSub.asyncIterator(
36+
`chat_stream_${input.chatId}`,
37+
);
38+
return asyncIterator;
3539
}
36-
3740
/**
 * Persists one chat message (text plus its role) through ChatService.
 *
 * @param input - chat id, message body, and the role to record it under
 * @returns true when the message was stored
 * @throws rethrows any persistence error after logging it
 */
@Mutation(() => Boolean)
@JWTAuth()
async saveMessage(@Args('input') input: ChatInput): Promise<boolean> {
  const { chatId, message, role } = input;
  try {
    await this.chatService.saveMessage(chatId, message, role);
    return true;
  } catch (error) {
    // Record the failure, then surface it to the GraphQL caller.
    this.logger.error('Error in saveMessage:', error);
    throw error;
  }
}
55+
@Mutation(() => Boolean)
56+
@JWTAuth()
57+
async triggerChatStream(@Args('input') input: ChatInput): Promise<boolean> {
58+
try {
4759
const iterator = this.chatProxyService.streamChat(input);
4860
let accumulatedContent = '';
4961

50-
for await (const chunk of iterator) {
51-
if (chunk) {
52-
const enhancedChunk = {
53-
...chunk,
54-
chatId: input.chatId,
55-
};
62+
try {
63+
for await (const chunk of iterator) {
64+
console.log('received chunk:', chunk);
65+
if (chunk) {
66+
const enhancedChunk = {
67+
...chunk,
68+
chatId: input.chatId,
69+
};
70+
71+
await this.pubSub.publish(`chat_stream_${input.chatId}`, {
72+
chatStream: enhancedChunk,
73+
});
74+
75+
if (chunk.choices?.[0]?.delta?.content) {
76+
accumulatedContent += chunk.choices[0].delta.content;
77+
}
78+
}
79+
}
80+
} finally {
81+
const finalChunk = await iterator.return();
82+
console.log('finalChunk:', finalChunk);
5683

84+
if (finalChunk.value?.status === StreamStatus.DONE) {
5785
await this.pubSub.publish(`chat_stream_${input.chatId}`, {
58-
chatStream: enhancedChunk,
86+
chatStream: {
87+
...finalChunk.value,
88+
chatId: input.chatId,
89+
},
5990
});
60-
61-
if (chunk.choices[0]?.delta?.content) {
62-
accumulatedContent += chunk.choices[0].delta.content;
63-
}
6491
}
6592
}
6693

67-
await this.chatService.saveMessage(
68-
input.chatId,
69-
accumulatedContent,
70-
MessageRole.Assistant,
71-
);
72-
7394
return true;
7495
} catch (error) {
7596
this.logger.error('Error in triggerChatStream:', error);
@@ -108,6 +129,19 @@ export class ChatResolver {
108129
return this.chatService.getChatDetails(chatId);
109130
}
110131

132+
/**
 * Resolves the project currently associated with a chat.
 *
 * @param chatId - id of the chat whose project should be loaded
 * @returns the linked Project, or null when none is associated — the
 *          GraphQL field is declared nullable, and the service layer
 *          returns null for a missing chat, so the TS return type must
 *          admit null as well
 * @throws Error when the lookup itself fails
 */
@JWTAuth()
@Query(() => Project, { nullable: true })
async getCurProject(
  @Args('chatId') chatId: string,
): Promise<Project | null> {
  try {
    const response = await this.chatService.getProjectByChatId(chatId);
    this.logger.log('Loaded project:', response);
    return response;
  } catch (error) {
    this.logger.error('Failed to fetch project:', error);
    throw new Error('Failed to fetch project');
  }
}
144+
111145
@Mutation(() => Chat)
112146
@JWTAuth()
113147
async createChat(

backend/src/chat/chat.service.ts

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@ import {
1111
} from 'src/chat/dto/chat.input';
1212
import { CustomAsyncIterableIterator } from 'src/common/model-provider/types';
1313
import { OpenAIModelProvider } from 'src/common/model-provider/openai-model-provider';
14+
import { Project } from 'src/project/project.model';
1415

1516
@Injectable()
1617
export class ChatProxyService {
@@ -98,6 +99,15 @@ export class ChatService {
9899
return chat;
99100
}
100101

102+
/**
 * Looks up the project linked to a chat.
 *
 * @param chatId - id of a non-deleted chat
 * @returns the chat's Project, or null when the chat does not exist
 *          (return type widened to `Project | null` — the body already
 *          returns null on a missing chat, so `Promise<Project>` was a
 *          lie under strictNullChecks)
 */
async getProjectByChatId(chatId: string): Promise<Project | null> {
  const chat = await this.chatRepository.findOne({
    where: { id: chatId, isDeleted: false },
    relations: ['project'],
  });

  // findOne yields null for a missing chat; propagate null rather than
  // dereferencing it.
  return chat ? chat.project : null;
}
110+
101111
async createChat(userId: string, newChatInput: NewChatInput): Promise<Chat> {
102112
const user = await this.userRepository.findOne({ where: { id: userId } });
103113
if (!user) {

backend/src/chat/dto/chat.input.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
// DTOs for Project APIs
22
import { InputType, Field } from '@nestjs/graphql';
3+
import { MessageRole } from '../message.model';
34

45
@InputType()
56
export class NewChatInput {
@@ -26,4 +27,6 @@ export class ChatInput {
2627

2728
@Field()
2829
model: string;
30+
@Field()
31+
role: MessageRole;
2932
}

backend/src/common/model-provider/openai-model-provider.ts

Lines changed: 19 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -118,7 +118,7 @@ export class OpenAIModelProvider implements IModelProvider {
118118
let streamIterator: AsyncIterator<OpenAIChatCompletionChunk> | null = null;
119119
const modelName = model || input.model;
120120
const queue = this.getQueueForModel(modelName);
121-
121+
let oldStreamValue: OpenAIChatCompletionChunk | null = null;
122122
const createStream = async () => {
123123
if (!stream) {
124124
const result = await queue.add(async () => {
@@ -145,6 +145,9 @@ export class OpenAIModelProvider implements IModelProvider {
145145
const currentIterator = await createStream();
146146
const chunk = await currentIterator.next();
147147
const chunkValue = chunk.value as OpenAIChatCompletionChunk;
148+
console.log('isDone:', chunk.done);
149+
console.log('chunk:', chunk);
150+
if (!chunk.done) oldStreamValue = chunkValue;
148151
return {
149152
done: chunk.done,
150153
value: {
@@ -159,9 +162,23 @@ export class OpenAIModelProvider implements IModelProvider {
159162
}
160163
},
161164
// Terminates the stream (early cancel or normal completion): clears the
// cached stream state and emits one synthetic final chunk so consumers
// observe a DONE status with a 'stop' finish reason.
// NOTE: leftover console.log debug statements removed — use structured
// logging if diagnostics are needed here.
async return() {
  stream = null;
  streamIterator = null;
  return {
    done: true,
    value: {
      // Carry over metadata (id/model/…) from the last real chunk seen.
      ...oldStreamValue,
      status: StreamStatus.DONE,
      choices: [
        {
          finishReason: 'stop',
        },
      ],
    },
  };
},
166183
async throw(error) {
167184
stream = null;

frontend/src/api/ChatStreamAPI.ts

Lines changed: 71 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,71 @@
1+
import { ChatInputType } from '@/graphql/type';
2+
3+
export const startChatStream = async (
4+
input: ChatInputType,
5+
token: string,
6+
stream: boolean = false // Default to non-streaming for better performance
7+
): Promise<string> => {
8+
if (!token) {
9+
throw new Error('Not authenticated');
10+
}
11+
const { chatId, message, model } = input;
12+
const response = await fetch('/api/chat', {
13+
method: 'POST',
14+
headers: {
15+
'Content-Type': 'application/json',
16+
Authorization: `Bearer ${token}`,
17+
},
18+
body: JSON.stringify({
19+
chatId,
20+
message,
21+
model,
22+
stream,
23+
}),
24+
});
25+
26+
if (!response.ok) {
27+
throw new Error(
28+
`Network response was not ok: ${response.status} ${response.statusText}`
29+
);
30+
}
31+
// TODO: Handle streaming responses properly
32+
// if (stream) {
33+
// // For streaming responses, aggregate the streamed content
34+
// let fullContent = '';
35+
// const reader = response.body?.getReader();
36+
// if (!reader) {
37+
// throw new Error('No reader available');
38+
// }
39+
40+
// while (true) {
41+
// const { done, value } = await reader.read();
42+
// if (done) break;
43+
44+
// const text = new TextDecoder().decode(value);
45+
// const lines = text.split('\n\n');
46+
47+
// for (const line of lines) {
48+
// if (line.startsWith('data: ')) {
49+
// const data = line.slice(5);
50+
// if (data === '[DONE]') break;
51+
// try {
52+
// const { content } = JSON.parse(data);
53+
// if (content) {
54+
// fullContent += content;
55+
// }
56+
// } catch (e) {
57+
// console.error('Error parsing SSE data:', e);
58+
// }
59+
// }
60+
// }
61+
// }
62+
// return fullContent;
63+
// } else {
64+
// // For non-streaming responses, return the content directly
65+
// const data = await response.json();
66+
// return data.content;
67+
// }
68+
69+
const data = await response.json();
70+
return data.content;
71+
};

0 commit comments

Comments
 (0)