Skip to content

Commit 49d5dfa

Browse files
authored
feat: add subagents (#2928)
1 parent bab9d85 commit 49d5dfa

File tree

31 files changed

+557
-197
lines changed

31 files changed

+557
-197
lines changed

apps/web/client/public/onlook-preload-script.js

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

apps/web/client/src/app/api/chat/helpers/stream.ts

Lines changed: 3 additions & 43 deletions
Original file line numberDiff line numberDiff line change
@@ -1,48 +1,8 @@
11
import type { ToolCall } from '@ai-sdk/provider-utils';
2-
import { getAskModeSystemPrompt, getCreatePageSystemPrompt, getSystemPrompt, initModel } from '@onlook/ai';
3-
import { ChatType, LLMProvider, OPENROUTER_MODELS, type ModelConfig } from '@onlook/models';
2+
import { initModel } from '@onlook/ai';
3+
import { LLMProvider, OPENROUTER_MODELS } from '@onlook/models';
44
import { generateObject, NoSuchToolError, type ToolSet } from 'ai';
55

6-
export async function getModelFromType(chatType: ChatType) {
7-
let model: ModelConfig;
8-
switch (chatType) {
9-
case ChatType.CREATE:
10-
case ChatType.FIX:
11-
model = await initModel({
12-
provider: LLMProvider.OPENROUTER,
13-
model: OPENROUTER_MODELS.OPEN_AI_GPT_5,
14-
});
15-
break;
16-
case ChatType.ASK:
17-
case ChatType.EDIT:
18-
default:
19-
model = await initModel({
20-
provider: LLMProvider.OPENROUTER,
21-
model: OPENROUTER_MODELS.CLAUDE_4_SONNET,
22-
});
23-
break;
24-
}
25-
return model;
26-
}
27-
28-
export function getSystemPromptFromType(chatType: ChatType) {
29-
let systemPrompt: string;
30-
31-
switch (chatType) {
32-
case ChatType.CREATE:
33-
systemPrompt = getCreatePageSystemPrompt();
34-
break;
35-
case ChatType.ASK:
36-
systemPrompt = getAskModeSystemPrompt();
37-
break;
38-
case ChatType.EDIT:
39-
default:
40-
systemPrompt = getSystemPrompt();
41-
break;
42-
}
43-
return systemPrompt;
44-
}
45-
466

477
export const repairToolCall = async ({ toolCall, tools, error }: { toolCall: ToolCall<string, unknown>, tools: ToolSet, error: Error }) => {
488
if (NoSuchToolError.isInstance(error)) {
@@ -60,7 +20,7 @@ export const repairToolCall = async ({ toolCall, tools, error }: { toolCall: Too
6020
`Invalid parameter for tool ${toolCall.toolName} with args ${JSON.stringify(toolCall.input)}, attempting to fix`,
6121
);
6222

63-
const { model } = await initModel({
23+
const { model } = initModel({
6424
provider: LLMProvider.OPENROUTER,
6525
model: OPENROUTER_MODELS.OPEN_AI_GPT_5_NANO,
6626
});

apps/web/client/src/app/api/chat/route.ts

Lines changed: 34 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,12 @@
11
import { api } from '@/trpc/server';
22
import { trackEvent } from '@/utils/analytics/server';
3-
import { AgentStreamer, RootAgent } from '@onlook/ai';
3+
import { AgentStreamer, BaseAgent, RootAgent, UserAgent } from '@onlook/ai';
44
import { toDbMessage } from '@onlook/db';
5-
import { ChatType, type ChatMessage } from '@onlook/models';
5+
import { AgentType, ChatType } from '@onlook/models';
66
import { type NextRequest } from 'next/server';
77
import { v4 as uuidv4 } from 'uuid';
88
import { checkMessageLimit, decrementUsage, errorHandler, getSupabaseUser, incrementUsage, repairToolCall } from './helpers';
9+
import { z } from 'zod';
910

1011
export async function POST(req: NextRequest) {
1112
try {
@@ -51,14 +52,24 @@ export async function POST(req: NextRequest) {
5152
}
5253
}
5354

55+
const streamResponseSchema = z.object({
56+
agentType: z.enum(AgentType).optional().default(AgentType.ROOT),
57+
messages: z.array(z.any()),
58+
chatType: z.enum(ChatType).optional(),
59+
conversationId: z.string(),
60+
projectId: z.string(),
61+
}).refine((data) => {
62+
// Only allow chatType if agentType is ROOT
63+
if (data.chatType !== undefined && data.agentType !== AgentType.ROOT) {
64+
return false;
65+
}
66+
return true;
67+
}, { message: "chatType is only allowed if agentType is root" });
68+
5469
export const streamResponse = async (req: NextRequest, userId: string) => {
5570
const body = await req.json();
56-
const { messages, chatType, conversationId, projectId } = body as {
57-
messages: ChatMessage[],
58-
chatType: ChatType,
59-
conversationId: string,
60-
projectId: string,
61-
};
71+
const { agentType, messages, chatType, conversationId, projectId } = streamResponseSchema.parse(body);
72+
6273
// Updating the usage record and rate limit is done here to avoid
6374
// abuse in the case where a single user sends many concurrent requests.
6475
// If the call below fails, the user will not be penalized.
@@ -71,12 +82,20 @@ export const streamResponse = async (req: NextRequest, userId: string) => {
7182
const lastUserMessage = messages.findLast((message) => message.role === 'user');
7283
const traceId = lastUserMessage?.id ?? uuidv4();
7384

74-
if (chatType === ChatType.EDIT) {
75-
usageRecord = await incrementUsage(req, traceId);
76-
}
77-
7885
// Create RootAgent instance
79-
const agent = await RootAgent.create(chatType);
86+
let agent: BaseAgent;
87+
if (agentType === AgentType.ROOT) {
88+
if (chatType === ChatType.EDIT) {
89+
usageRecord = await incrementUsage(req, traceId);
90+
}
91+
92+
agent = new RootAgent(chatType!);
93+
} else if (agentType === AgentType.USER) {
94+
agent = new UserAgent();
95+
} else {
96+
// agent = new WeatherAgent();
97+
throw new Error('Agent type not supported');
98+
}
8099
const streamer = new AgentStreamer(agent, conversationId);
81100

82101
return streamer.streamText(messages, {
@@ -87,7 +106,8 @@ export const streamResponse = async (req: NextRequest, userId: string) => {
87106
conversationId,
88107
projectId,
89108
userId,
90-
chatType: chatType,
109+
agentType: agentType ?? AgentType.ROOT,
110+
chatType: chatType ?? "null",
91111
tags: ['chat'],
92112
langfuseTraceId: traceId,
93113
sessionId: conversationId,

apps/web/client/src/app/project/[id]/_hooks/use-chat/index.tsx

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ import { useEditorEngine } from '@/components/store/editor';
44
import { handleToolCall } from '@/components/tools';
55
import { api } from '@/trpc/client';
66
import { useChat as useAiChat } from '@ai-sdk/react';
7-
import { ChatType, type ChatMessage, type ChatSuggestion } from '@onlook/models';
7+
import { AgentType, ChatType, type ChatMessage, type ChatSuggestion } from '@onlook/models';
88
import { jsonClone } from '@onlook/utility';
99
import { DefaultChatTransport, lastAssistantMessageIsCompleteWithToolCalls } from 'ai';
1010
import { usePostHog } from 'posthog-js/react';
@@ -32,6 +32,7 @@ interface UseChatProps {
3232
projectId: string;
3333
initialMessages: ChatMessage[];
3434
}
35+
const agentType = AgentType.ROOT;
3536

3637
export function useChat({ conversationId, projectId, initialMessages }: UseChatProps) {
3738
const editorEngine = useEditorEngine();
@@ -41,6 +42,7 @@ export function useChat({ conversationId, projectId, initialMessages }: UseChatP
4142
const [finishReason, setFinishReason] = useState<string | null>(null);
4243
const [isExecutingToolCall, setIsExecutingToolCall] = useState(false);
4344

45+
4446
const { addToolResult, messages, error, stop, setMessages, regenerate, status } =
4547
useAiChat<ChatMessage>({
4648
id: 'user-chat',
@@ -51,11 +53,12 @@ export function useChat({ conversationId, projectId, initialMessages }: UseChatP
5153
body: {
5254
conversationId,
5355
projectId,
56+
agentType,
5457
},
5558
}),
5659
onToolCall: async (toolCall) => {
5760
setIsExecutingToolCall(true);
58-
void handleToolCall(toolCall.toolCall, editorEngine, addToolResult).then(() => {
61+
void handleToolCall(agentType, toolCall.toolCall, editorEngine, addToolResult).then(() => {
5962
setIsExecutingToolCall(false);
6063
});
6164
},
@@ -89,6 +92,7 @@ export function useChat({ conversationId, projectId, initialMessages }: UseChatP
8992
chatType: type,
9093
conversationId,
9194
context,
95+
agentType,
9296
},
9397
});
9498
void editorEngine.chat.conversation.generateTitle(content);
@@ -137,6 +141,7 @@ export function useChat({ conversationId, projectId, initialMessages }: UseChatP
137141
body: {
138142
chatType,
139143
conversationId,
144+
agentType,
140145
},
141146
});
142147

apps/web/client/src/components/store/editor/chat/conversation.ts

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -141,6 +141,13 @@ export class ConversationManager {
141141
return api.chat.conversation.getAll.query({ projectId: id });
142142
}
143143

144+
async upsertConversationInStorage(conversation: Partial<ChatConversation>): Promise<ChatConversation> {
145+
return await api.chat.conversation.upsert.mutate({
146+
...conversation,
147+
projectId: this.editorEngine.projectId,
148+
});
149+
}
150+
144151
async updateConversationInStorage(conversation: Partial<ChatConversation> & { id: string }) {
145152
await api.chat.conversation.update.mutate(conversation);
146153
}

apps/web/client/src/components/tools/tools.ts

Lines changed: 11 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,23 +1,25 @@
11
import type { EditorEngine } from '@/components/store/editor/engine';
22
import type { ToolCall } from '@ai-sdk/provider-utils';
3-
import { getToolClassesFromType } from '@onlook/ai';
3+
import type { AbstractChat } from 'ai';
4+
import { getAvailableTools, type OnToolCallHandler } from '@onlook/ai';
45
import { toast } from '@onlook/ui/sonner';
6+
import type { AgentType } from '@onlook/models';
57

6-
export async function handleToolCall(toolCall: ToolCall<string, unknown>, editorEngine: EditorEngine, addToolResult: (toolResult: { tool: string, toolCallId: string, output: any }) => Promise<void>) {
8+
export async function handleToolCall(agentType: AgentType, toolCall: ToolCall<string, unknown>, editorEngine: EditorEngine, addToolResult: typeof AbstractChat.prototype.addToolResult) {
79
const toolName = toolCall.toolName;
810
const currentChatMode = editorEngine.state.chatMode;
9-
const availableTools = getToolClassesFromType(currentChatMode);
11+
const availableTools = getAvailableTools(agentType, currentChatMode) as any[];
1012
let output: any = null;
1113

1214
try {
13-
const tool = availableTools.find(tool => tool.toolName === toolName);
15+
const tool = availableTools.find((tool: any) => tool.toolName === toolName);
1416
if (!tool) {
1517
toast.error(`Tool "${toolName}" not available in ask mode`, {
1618
description: `Switch to build mode to use this tool.`,
1719
duration: 2000,
1820
});
1921

20-
throw new Error(`Tool "${toolName}" is not available in ${currentChatMode} mode`);
22+
throw new Error(`Tool "${toolName}" is not available in ${currentChatMode} mode!!!!`);
2123
}
2224

2325
if (!tool) {
@@ -26,8 +28,11 @@ export async function handleToolCall(toolCall: ToolCall<string, unknown>, editor
2628
// Parse the input to the tool parameters. Throws if invalid.
2729
const validatedInput = tool.parameters.parse(toolCall.input);
2830
const toolInstance = new tool();
31+
const getOnToolCall: OnToolCallHandler = (subAgentType, addSubAgentToolResult) => (toolCall) =>
32+
void handleToolCall(subAgentType, toolCall.toolCall, editorEngine, addSubAgentToolResult);
33+
2934
// Can force type with as any because we know the input is valid.
30-
output = await toolInstance.handle(validatedInput as any, editorEngine);
35+
output = await toolInstance.handle(validatedInput as any, editorEngine, getOnToolCall);
3136
} catch (error) {
3237
output = 'error handling tool call ' + error;
3338
} finally {

apps/web/client/src/server/api/routers/chat/conversation.ts

Lines changed: 13 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,13 @@ export const conversationRouter = createTRPCRouter({
3636
upsert: protectedProcedure
3737
.input(conversationInsertSchema)
3838
.mutation(async ({ ctx, input }) => {
39-
const [conversation] = await ctx.db.insert(conversations).values(input).returning();
39+
const [conversation] = await ctx.db.insert(conversations).values(input).onConflictDoUpdate({
40+
target: [conversations.id],
41+
set: {
42+
...input,
43+
updatedAt: new Date(),
44+
},
45+
}).returning();
4046
if (!conversation) {
4147
throw new Error('Conversation not created');
4248
}
@@ -45,10 +51,11 @@ export const conversationRouter = createTRPCRouter({
4551
update: protectedProcedure
4652
.input(conversationUpdateSchema)
4753
.mutation(async ({ ctx, input }) => {
48-
const [conversation] = await ctx.db.update({
49-
...conversations,
50-
updatedAt: new Date(),
51-
}).set(input)
54+
const [conversation] = await ctx.db.update(conversations)
55+
.set({
56+
...input,
57+
updatedAt: new Date(),
58+
})
5259
.where(eq(conversations.id, input.id)).returning();
5360
if (!conversation) {
5461
throw new Error('Conversation not updated');
@@ -68,7 +75,7 @@ export const conversationRouter = createTRPCRouter({
6875
content: z.string(),
6976
}))
7077
.mutation(async ({ ctx, input }) => {
71-
const { model, providerOptions, headers } = await initModel({
78+
const { model, providerOptions, headers } = initModel({
7279
provider: LLMProvider.OPENROUTER,
7380
model: OPENROUTER_MODELS.CLAUDE_3_5_HAIKU,
7481
});

apps/web/client/src/server/api/routers/chat/suggestion.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ export const suggestionsRouter = createTRPCRouter({
1818
})),
1919
}))
2020
.mutation(async ({ ctx, input }) => {
21-
const { model, headers } = await initModel({
21+
const { model, headers } = initModel({
2222
provider: LLMProvider.OPENROUTER,
2323
model: OPENROUTER_MODELS.OPEN_AI_GPT_5_NANO,
2424
});

apps/web/client/src/server/api/routers/project/project.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -321,7 +321,7 @@ export const projectRouter = createTRPCRouter({
321321
}))
322322
.mutation(async ({ ctx, input }): Promise<string> => {
323323
try {
324-
const { model, providerOptions, headers } = await initModel({
324+
const { model, providerOptions, headers } = initModel({
325325
provider: LLMProvider.OPENROUTER,
326326
model: OPENROUTER_MODELS.OPEN_AI_GPT_5_NANO,
327327
});

bun.lock

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -222,6 +222,7 @@
222222
"gpt-tokenizer": "^3.0.1",
223223
"marked": "^15.0.7",
224224
"openai": "^4.103.0",
225+
"uuid": "^11.1.0",
225226
"zod": "^4.1.3",
226227
},
227228
"devDependencies": {

0 commit comments

Comments (0)