@@ -101,6 +101,197 @@ main().catch(console.error)

---

## 🔧 API Methods

### MCPAgent Methods

The `MCPAgent` class provides several methods for executing queries with different output formats:

#### `run(query: string, maxSteps?: number): Promise<string>`

Executes a query and returns the final result as a string.

```ts
const result = await agent.run('What tools are available?')
console.log(result)
```

#### `stream(query: string, maxSteps?: number): AsyncGenerator<AgentStep, string, void>`

Yields intermediate steps during execution, providing visibility into the agent's reasoning process.

```ts
const stream = agent.stream('Search for restaurants in Tokyo')
for await (const step of stream) {
  console.log(`Tool: ${step.action.tool}, Input: ${step.action.toolInput}`)
  console.log(`Result: ${step.observation}`)
}
```

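Note that the `string` type parameter in the signature above is the generator's return value: the final answer becomes available once iteration finishes. A minimal sketch of capturing it by driving the generator manually, relying only on the documented signature:

```ts
const generator = agent.stream('Search for restaurants in Tokyo')
let next = await generator.next()
while (!next.done) {
  // Each yielded value is an AgentStep describing one tool invocation
  console.log(`Tool: ${next.value.action.tool}`)
  next = await generator.next()
}
// Once done, the generator's return value is the final result string
console.log(`Final result: ${next.value}`)
```
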
#### `streamEvents(query: string, maxSteps?: number): AsyncGenerator<StreamEvent, void, void>`

Yields fine-grained LangChain StreamEvent objects, enabling token-by-token streaming and detailed event tracking.

```ts
const eventStream = agent.streamEvents('What is the weather today?')
for await (const event of eventStream) {
  // Handle different event types
  switch (event.event) {
    case 'on_chat_model_stream':
      // Token-by-token streaming from the LLM
      if (event.data?.chunk?.content) {
        process.stdout.write(event.data.chunk.content)
      }
      break
    case 'on_tool_start':
      console.log(`\nTool started: ${event.name}`)
      break
    case 'on_tool_end':
      console.log(`Tool completed: ${event.name}`)
      break
  }
}
```

### Key Differences

- **`run()`**: Best for simple queries where you only need the final result
- **`stream()`**: Best for debugging and understanding the agent's tool usage
- **`streamEvents()`**: Best for real-time UI updates with token-level streaming

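All three methods accept the optional `maxSteps` argument shown in the signatures above, which caps the number of agent steps for that single call. A short sketch, assuming the per-call value takes precedence over the `maxSteps` passed to the `MCPAgent` constructor:

```ts
// Cap this query at 3 steps, regardless of the agent's default
const quick = await agent.run('Summarize the available tools', 3)

// The same optional argument applies to the streaming variants
for await (const step of agent.stream('Find flights to Osaka', 5)) {
  console.log(step.action.tool)
}
```
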
## 🔄 AI SDK Integration

The library provides built-in utilities for integrating with the [Vercel AI SDK](https://sdk.vercel.ai/), making it easy to build streaming UIs with React hooks like `useCompletion` and `useChat`.

### Installation

```bash
npm install ai @langchain/anthropic
```

### Basic Usage

```ts
import { ChatAnthropic } from '@langchain/anthropic'
import { LangChainAdapter } from 'ai'
import { createReadableStreamFromGenerator, MCPAgent, MCPClient, streamEventsToAISDK } from 'mcp-use'

async function createApiHandler() {
  const config = {
    mcpServers: {
      everything: { command: 'npx', args: ['-y', '@modelcontextprotocol/server-everything'] }
    }
  }

  const client = new MCPClient(config)
  const llm = new ChatAnthropic({ model: 'claude-sonnet-4-20250514' })
  const agent = new MCPAgent({ llm, client, maxSteps: 5 })

  return async (request: { prompt: string }) => {
    const streamEvents = agent.streamEvents(request.prompt)
    const aiSDKStream = streamEventsToAISDK(streamEvents)
    const readableStream = createReadableStreamFromGenerator(aiSDKStream)

    return LangChainAdapter.toDataStreamResponse(readableStream)
  }
}
```

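The handler returned by `createApiHandler()` is framework-agnostic: it takes a `{ prompt }` object and resolves to the streaming `Response` produced by `LangChainAdapter.toDataStreamResponse()`. A hypothetical standalone invocation (variable names here are illustrative, not part of the library):

```ts
const handler = await createApiHandler()

const response = await handler({ prompt: 'What tools can you use?' })
// `response` is a standard Response whose body streams AI SDK data-protocol chunks
console.log(response.headers.get('content-type'))
```
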
### Enhanced Usage with Tool Visibility

```ts
import { ChatAnthropic } from '@langchain/anthropic'
import { LangChainAdapter } from 'ai'
import { createReadableStreamFromGenerator, MCPAgent, MCPClient, streamEventsToAISDKWithTools } from 'mcp-use'

async function createEnhancedApiHandler() {
  const config = {
    mcpServers: {
      everything: { command: 'npx', args: ['-y', '@modelcontextprotocol/server-everything'] }
    }
  }

  const client = new MCPClient(config)
  const llm = new ChatAnthropic({ model: 'claude-sonnet-4-20250514' })
  const agent = new MCPAgent({ llm, client, maxSteps: 8 })

  return async (request: { prompt: string }) => {
    const streamEvents = agent.streamEvents(request.prompt)
    // Enhanced stream includes tool usage notifications
    const enhancedStream = streamEventsToAISDKWithTools(streamEvents)
    const readableStream = createReadableStreamFromGenerator(enhancedStream)

    return LangChainAdapter.toDataStreamResponse(readableStream)
  }
}
```

### Next.js API Route Example

```ts
// pages/api/chat.ts or app/api/chat/route.ts
import { ChatAnthropic } from '@langchain/anthropic'
import { LangChainAdapter } from 'ai'
import { createReadableStreamFromGenerator, MCPAgent, MCPClient, streamEventsToAISDK } from 'mcp-use'

export async function POST(req: Request) {
  const { prompt } = await req.json()

  const config = {
    mcpServers: {
      everything: { command: 'npx', args: ['-y', '@modelcontextprotocol/server-everything'] }
    }
  }

  const client = new MCPClient(config)
  const llm = new ChatAnthropic({ model: 'claude-sonnet-4-20250514' })
  const agent = new MCPAgent({ llm, client, maxSteps: 10 })

  try {
    const streamEvents = agent.streamEvents(prompt)
    const aiSDKStream = streamEventsToAISDK(streamEvents)
    const readableStream = createReadableStreamFromGenerator(aiSDKStream)

    return LangChainAdapter.toDataStreamResponse(readableStream)
  }
  finally {
    await client.closeAllSessions()
  }
}
```

### Frontend Integration

```tsx
// components/Chat.tsx
import { useCompletion } from 'ai/react'

export function Chat() {
  const { completion, input, handleInputChange, handleSubmit } = useCompletion({
    api: '/api/chat',
  })

  return (
    <div>
      <div>{completion}</div>
      <form onSubmit={handleSubmit}>
        <input
          value={input}
          onChange={handleInputChange}
          placeholder="Ask me anything..."
        />
      </form>
    </div>
  )
}
```

### Available AI SDK Utilities

- **`streamEventsToAISDK()`**: Converts a streamEvents generator into a basic text stream
- **`streamEventsToAISDKWithTools()`**: Enhanced stream that also emits tool usage notifications
- **`createReadableStreamFromGenerator()`**: Converts an async generator into a ReadableStream

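These utilities also compose outside of an agent. A rough sketch, assuming `createReadableStreamFromGenerator` accepts any `AsyncGenerator<string>` (which matches how it is used above):

```ts
import { createReadableStreamFromGenerator } from 'mcp-use'

async function* words() {
  yield 'streaming '
  yield 'works'
}

// Produces a web ReadableStream suitable for returning from a route handler
const readable = createReadableStreamFromGenerator(words())
```
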
---

## 📂 Configuration File

You can store servers in a JSON file:
@@ -140,6 +331,9 @@ npm install
npm run example:airbnb          # Search accommodations with Airbnb
npm run example:browser         # Browser automation with Playwright
npm run example:chat            # Interactive chat with memory
npm run example:stream          # Demonstrate streaming methods (stream & streamEvents)
npm run example:stream_events   # Comprehensive streamEvents() examples
npm run example:ai_sdk          # AI SDK integration with streaming
npm run example:filesystem      # File system operations
npm run example:http            # HTTP server connection
npm run example:everything      # Test MCP functionalities
@@ -153,6 +347,8 @@ npm run example:multi # Multiple servers in one session
- **Multi-Server**: Combine multiple MCP servers (Airbnb + Browser) in a single task
- **Sandboxed Execution**: Run MCP servers in isolated E2B containers
- **OAuth Flows**: Authenticate with services like Linear using OAuth2
- **Streaming Methods**: Demonstrate both step-by-step and token-level streaming
- **AI SDK Integration**: Build streaming UIs with Vercel AI SDK and React hooks

See the [examples README](./examples/README.md) for detailed documentation and prerequisites.