Skip to content

Commit d645235

Browse files
committed
add reasoning example
1 parent c28021b commit d645235

File tree

10 files changed

+624
-3
lines changed

10 files changed

+624
-3
lines changed

typescript-sdk/apps/dojo/src/agents.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -135,6 +135,9 @@ export const agentsIntegrations: AgentIntegrationConfig[] = [
135135
tool_based_generative_ui: new LangGraphHttpAgent({
136136
url: "http://localhost:8000/agent/tool_based_generative_ui",
137137
}),
138+
agentic_chat_reasoning: new LangGraphHttpAgent({
139+
url: "http://localhost:8000/agent/agentic_chat_reasoning",
140+
}),
138141
};
139142
},
140143
},
Lines changed: 45 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,45 @@
1+
# 🤖 Agentic Chat with Reasoning
2+
3+
## What This Demo Shows
4+
5+
This demo showcases CopilotKit's **agentic chat** capabilities with **frontend
6+
tool integration**:
7+
8+
1. **Natural Conversation**: Chat with your Copilot in a familiar chat interface
9+
2. **Frontend Tool Execution**: The Copilot can directly interact with your UI
10+
by calling frontend functions
11+
3. **Seamless Integration**: Tools defined in the frontend are automatically
12+
discovered and made available to the agent
13+
14+
## How to Interact
15+
16+
Try asking your Copilot to:
17+
18+
- "Can you change the background color to something more vibrant?"
19+
- "Make the background a blue-to-purple gradient"
20+
- "Set the background to a sunset-themed gradient"
21+
- "Change it back to a simple light color"
22+
23+
You can also chat about other topics - the agent will respond conversationally
24+
while having the ability to use your UI tools when appropriate.
25+
26+
## ✨ Frontend Tool Integration in Action
27+
28+
**What's happening technically:**
29+
30+
- The React component defines a frontend function using `useCopilotAction`
31+
- CopilotKit automatically exposes this function to the agent
32+
- When you make a request, the agent determines whether to use the tool
33+
- The agent calls the function with the appropriate parameters
34+
- The UI immediately updates in response
35+
36+
**What you'll see in this demo:**
37+
38+
- The Copilot understands requests to change the background
39+
- It generates CSS values for colors and gradients
40+
- When it calls the tool, the background changes instantly
41+
- The agent provides a conversational response about the changes it made
42+
43+
This technique of exposing frontend functions to your Copilot can be extended to
44+
any UI manipulation you want to enable, from theme changes to data filtering,
45+
navigation, or complex UI state management!
Lines changed: 122 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,122 @@
1+
"use client";
2+
import React, { useState } from "react";
3+
import "@copilotkit/react-ui/styles.css";
4+
import "./style.css";
5+
import { CopilotKit, useCoAgent, useCopilotAction, useCopilotChat } from "@copilotkit/react-core";
6+
import { CopilotChat } from "@copilotkit/react-ui";
7+
import { ChevronDown } from "lucide-react";
8+
import { Button } from "@/components/ui/button";
9+
import {
10+
DropdownMenu,
11+
DropdownMenuContent,
12+
DropdownMenuItem,
13+
DropdownMenuLabel,
14+
DropdownMenuSeparator,
15+
DropdownMenuTrigger,
16+
} from "@/components/ui/dropdown-menu";
17+
18+
interface AgenticChatProps {
19+
params: Promise<{
20+
integrationId: string;
21+
}>;
22+
}
23+
24+
const AgenticChat: React.FC<AgenticChatProps> = ({ params }) => {
25+
const { integrationId } = React.use(params);
26+
27+
return (
28+
<CopilotKit
29+
runtimeUrl={`/api/copilotkit/${integrationId}`}
30+
showDevConsole={false}
31+
// agent lock to the relevant agent
32+
agent="agentic_chat_reasoning"
33+
>
34+
<Chat />
35+
</CopilotKit>
36+
);
37+
};
38+
39+
interface AgentState {
40+
model: string;
41+
}
42+
43+
const Chat = () => {
44+
const [background, setBackground] = useState<string>("--copilot-kit-background-color");
45+
const { state: agentState, setState: setAgentState } = useCoAgent<AgentState>({
46+
name: "agentic_chat_reasoning",
47+
initialState: {
48+
model: "OpenAI",
49+
},
50+
});
51+
52+
// Initialize model if not set
53+
const selectedModel = agentState?.model || "OpenAI";
54+
55+
const handleModelChange = (model: string) => {
56+
setAgentState({ model });
57+
};
58+
59+
useCopilotAction({
60+
name: "change_background",
61+
description:
62+
"Change the background color of the chat. Can be anything that the CSS background attribute accepts. Regular colors, linear of radial gradients etc.",
63+
parameters: [
64+
{
65+
name: "background",
66+
type: "string",
67+
description: "The background. Prefer gradients.",
68+
},
69+
],
70+
handler: ({ background }) => {
71+
setBackground(background);
72+
},
73+
});
74+
75+
return (
76+
<div className="flex flex-col h-full w-full" style={{ background }}>
77+
{/* Reasoning Model Dropdown */}
78+
<div className="h-[65px] border-b border-gray-200 dark:border-gray-700">
79+
<div className="h-full flex items-center justify-center">
80+
<div className="flex items-center gap-2">
81+
<span className="text-sm font-medium text-gray-700 dark:text-gray-300">
82+
Reasoning Model:
83+
</span>
84+
<DropdownMenu>
85+
<DropdownMenuTrigger asChild>
86+
<Button variant="outline" className="w-[140px] justify-between">
87+
{selectedModel}
88+
<ChevronDown className="h-4 w-4 opacity-50" />
89+
</Button>
90+
</DropdownMenuTrigger>
91+
<DropdownMenuContent className="w-[140px]">
92+
<DropdownMenuLabel>Select Model</DropdownMenuLabel>
93+
<DropdownMenuSeparator />
94+
<DropdownMenuItem onClick={() => handleModelChange("OpenAI")}>
95+
OpenAI
96+
</DropdownMenuItem>
97+
<DropdownMenuItem onClick={() => handleModelChange("Anthropic")}>
98+
Anthropic
99+
</DropdownMenuItem>
100+
<DropdownMenuItem onClick={() => handleModelChange("Gemini")}>
101+
Gemini
102+
</DropdownMenuItem>
103+
</DropdownMenuContent>
104+
</DropdownMenu>
105+
</div>
106+
</div>
107+
</div>
108+
109+
{/* Chat Container */}
110+
<div className="flex-1 flex justify-center items-center p-4">
111+
<div className="w-8/10 h-full rounded-lg">
112+
<CopilotChat
113+
className="h-full rounded-2xl"
114+
labels={{ initial: "Hi, I'm an agent. Want to chat?" }}
115+
/>
116+
</div>
117+
</div>
118+
</div>
119+
);
120+
};
121+
122+
export default AgenticChat;
Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
.copilotKitInput {
2+
border-bottom-left-radius: 0.75rem;
3+
border-bottom-right-radius: 0.75rem;
4+
border-top-left-radius: 0.75rem;
5+
border-top-right-radius: 0.75rem;
6+
border: 1px solid var(--copilot-kit-separator-color) !important;
7+
}
8+
9+
.copilotKitChat {
10+
background-color: #fff !important;
11+
}
12+

typescript-sdk/apps/dojo/src/config.ts

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -53,6 +53,12 @@ export const featureConfig: FeatureConfig[] = [
5353
description: "Use collaboration to edit a document in real time with your Copilot",
5454
tags: ["State", "Streaming", "Tools"],
5555
}),
56+
createFeatureConfig({
57+
id: "agentic_chat_reasoning",
58+
name: "Agentic Chat Reasoning",
59+
description: "Chat with a reasoning Copilot and call frontend tools",
60+
tags: ["Chat", "Tools", "Streaming", "Reasoning"],
61+
}),
5662
];
5763

5864
export default featureConfig;

typescript-sdk/apps/dojo/src/menu.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -60,6 +60,7 @@ export const menuIntegrations: MenuIntegrationConfig[] = [
6060
"tool_based_generative_ui",
6161
"predictive_state_updates",
6262
"shared_state",
63+
"agentic_chat_reasoning",
6364
],
6465
},
6566
{
Lines changed: 87 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,87 @@
1+
"""
2+
A simple agentic chat flow using LangGraph instead of CrewAI.
3+
"""
4+
5+
from typing import List, Any, Optional
6+
from langchain_core.runnables import RunnableConfig
7+
from langchain_core.messages import SystemMessage
8+
from langchain_openai import ChatOpenAI
9+
from langchain_anthropic import ChatAnthropic
10+
from langchain_google_genai import ChatGoogleGenerativeAI
11+
from langgraph.graph import StateGraph, END, START
12+
from langgraph.graph import MessagesState
13+
from langgraph.types import Command
14+
from langgraph.checkpoint.memory import MemorySaver
15+
16+
class AgentState(MessagesState):
17+
"""
18+
State of our graph.
19+
"""
20+
tools: List[Any]
21+
model: str
22+
23+
async def chat_node(state: AgentState, config: Optional[RunnableConfig] = None):
24+
"""
25+
Standard chat node based on the ReAct design pattern. It handles:
26+
- The model to use (and binds in CopilotKit actions and the tools defined above)
27+
- The system prompt
28+
- Getting a response from the model
29+
- Handling tool calls
30+
31+
For more about the ReAct design pattern, see:
32+
https://www.perplexity.ai/search/react-agents-NcXLQhreS0WDzpVaS4m9Cg
33+
"""
34+
35+
36+
# 1. Define the model
37+
model = ChatOpenAI(model="o3")
38+
if state["model"] == "Anthropic":
39+
model = ChatAnthropic(
40+
model="claude-sonnet-4-20250514",
41+
thinking={"type": "enabled", "budget_tokens": 2000}
42+
)
43+
elif state["model"] == "Gemini":
44+
model = ChatGoogleGenerativeAI(model="gemini-2.5-pro", thinking_budget=1024)
45+
46+
# Define config for the model
47+
if config is None:
48+
config = RunnableConfig(recursion_limit=25)
49+
50+
# 2. Bind the tools to the model
51+
model_with_tools = model.bind_tools(
52+
[
53+
*state["tools"],
54+
# your_tool_here
55+
],
56+
)
57+
58+
# 3. Define the system message by which the chat model will be run
59+
system_message = SystemMessage(
60+
content="You are a helpful assistant."
61+
)
62+
63+
# 4. Run the model to generate a response
64+
response = await model_with_tools.ainvoke([
65+
system_message,
66+
*state["messages"],
67+
], config)
68+
69+
# 6. We've handled all tool calls, so we can end the graph.
70+
return Command(
71+
goto=END,
72+
update={
73+
"messages": response
74+
}
75+
)
76+
77+
# Define a new graph
78+
workflow = StateGraph(AgentState)
79+
workflow.add_node("chat_node", chat_node)
80+
workflow.set_entry_point("chat_node")
81+
82+
# Add explicit edges, matching the pattern in other examples
83+
workflow.add_edge(START, "chat_node")
84+
workflow.add_edge("chat_node", END)
85+
86+
# Compile the graph
87+
agentic_chat_reasoning_graph = workflow.compile(checkpointer=MemorySaver())

typescript-sdk/integrations/langgraph/python/ag_ui_langgraph/examples/agents/dojo.py

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@
1212
from .tool_based_generative_ui import tool_based_generative_ui_graph
1313
from .agentic_chat import agentic_chat_graph
1414
from .agentic_generative_ui import graph
15+
from .agentic_chat_reasoning import agentic_chat_reasoning_graph
1516

1617
app = FastAPI(title="LangGraph Dojo Example Server")
1718

@@ -46,7 +47,12 @@
4647
name="predictive_state_updates",
4748
description="An example for a predictive state updates flow.",
4849
graph=predictive_state_updates_graph,
49-
)
50+
),
51+
"agentic_chat_reasoning": LangGraphAgent(
52+
name="agentic_chat_reasoning",
53+
description="An example for a reasoning chat.",
54+
graph=agentic_chat_reasoning_graph,
55+
),
5056
}
5157

5258
add_langgraph_fastapi_endpoint(
@@ -85,6 +91,12 @@
8591
path="/agent/predictive_state_updates"
8692
)
8793

94+
add_langgraph_fastapi_endpoint(
95+
app=app,
96+
agent=agents["agentic_chat_reasoning"],
97+
path="/agent/agentic_chat_reasoning"
98+
)
99+
88100
def main():
89101
"""Run the uvicorn server."""
90102
port = int(os.getenv("PORT", "8000"))

0 commit comments

Comments
 (0)