@@ -2,11 +2,8 @@ import path from 'node:path';
22import fs from 'node:fs/promises' ;
33import os from 'node:os' ;
44import { DefaultAzureCredential , getBearerTokenProvider } from '@azure/identity' ;
5- import { AzureChatOpenAI } from '@langchain/openai' ;
6- import { BaseChatModel } from '@langchain/core/language_models/chat_models' ;
7- import { ChatPromptTemplate } from '@langchain/core/prompts' ;
8- import { createToolCallingAgent } from 'langchain/agents' ;
9- import { AgentExecutor } from 'langchain/agents' ;
5+ import { ChatOpenAI } from '@langchain/openai' ;
6+ import { createReactAgent } from '@langchain/langgraph/prebuilt' ;
107import { loadMcpTools } from '@langchain/mcp-adapters' ;
118import { StreamableHTTPClientTransport } from '@modelcontextprotocol/sdk/client/streamableHttp.js' ;
129import { Client } from '@modelcontextprotocol/sdk/client/index.js' ;
@@ -120,11 +117,7 @@ async function saveSession(session: SessionData): Promise<void> {
120117}
121118
122119function convertHistoryToMessages ( history : SessionData [ 'history' ] ) : BaseMessage [ ] {
123- return history . map ( msg =>
124- msg . type === 'human'
125- ? new HumanMessage ( msg . content )
126- : new AIMessage ( msg . content )
127- ) ;
120+ return history . map ( ( msg ) => ( msg . type === 'human' ? new HumanMessage ( msg . content ) : new AIMessage ( msg . content ) ) ) ;
128121}
129122
130123export async function run ( ) {
@@ -136,8 +129,6 @@ export async function run() {
136129 let client : Client | undefined ;
137130
138131 try {
139- let model : BaseChatModel ;
140-
141132 if ( ! azureOpenAiEndpoint || ! burgerMcpEndpoint ) {
142133 const errorMessage = 'Missing required environment variables: AZURE_OPENAI_API_ENDPOINT or BURGER_MCP_URL' ;
143134 console . error ( errorMessage ) ;
@@ -155,11 +146,24 @@ export async function run() {
155146 }
156147 }
157148
158- const azureADTokenProvider = getBearerTokenProvider ( new DefaultAzureCredential ( ) , 'https://cognitiveservices.azure.com/.default' ) ; ;
159-
160- model = new AzureChatOpenAI ( {
161- azureADTokenProvider,
162- azureOpenAIApiVersion : process . env . AZURE_OPENAI_API_VERSION ,
149+ const getToken = getBearerTokenProvider (
150+ new DefaultAzureCredential ( ) ,
151+ 'https://cognitiveservices.azure.com/.default' ,
152+ ) ;
153+ const model = new ChatOpenAI ( {
154+ configuration : {
155+ baseURL : azureOpenAiEndpoint ,
156+ async fetch ( url , init = { } ) {
157+ const token = await getToken ( ) ;
158+ const headers = new Headers ( init . headers ) ;
159+ headers . set ( 'Authorization' , `Bearer ${ token } ` ) ;
160+ return fetch ( url , { ...init , headers } ) ;
161+ } ,
162+ } ,
163+ modelName : process . env . AZURE_OPENAI_MODEL ?? 'gpt-5-mini' ,
164+ streaming : true ,
165+ useResponsesApi : true ,
166+ apiKey : 'not_used' ,
163167 } ) ;
164168
165169 client = new Client ( {
@@ -173,49 +177,60 @@ export async function run() {
173177 const tools = await loadMcpTools ( 'burger' , client ) ;
174178 console . log ( `Loaded ${ tools . length } tools from Burger MCP server` ) ;
175179
176- const prompt = ChatPromptTemplate . fromMessages ( [
177- [ 'system' , agentSystemPrompt + ( session . userId ? `\n\nUser ID: ${ session . userId } ` : '' ) ] ,
178- [ 'placeholder' , '{chat_history}' ] ,
179- [ 'human' , '{input}' ] ,
180- [ 'placeholder' , '{agent_scratchpad}' ] ,
181- ] ) ;
182-
183- const agent = createToolCallingAgent ( {
180+ const agent = createReactAgent ( {
184181 llm : model ,
185182 tools,
186- prompt,
187- } ) ;
188- const agentExecutor = new AgentExecutor ( {
189- agent,
190- tools,
191- returnIntermediateSteps : verbose ,
183+ prompt : agentSystemPrompt + ( session . userId ? `\n\nUser ID: ${ session . userId } ` : '' ) ,
192184 } ) ;
193185
194186 const chatHistory = convertHistoryToMessages ( session . history ) ;
195187
196- console . log ( `Thinking...` ) ;
197- const response = await agentExecutor . invoke ( {
198- input : question ,
199- chat_history : chatHistory
200- } ) ;
201-
202- if ( verbose && response . intermediateSteps && response . intermediateSteps . length > 0 ) {
203- console . log ( '--------------------\nIntermediate steps\n--------------------' ) ;
204- for ( const [ index , step ] of response . intermediateSteps . entries ( ) ) {
205- console . log ( `*** Step ${ index + 1 } ***` ) ;
206- console . log ( `Action: ${ step . action . tool } ` ) ;
207- console . log ( `Input: ${ JSON . stringify ( step . action . toolInput , null , 2 ) } ` ) ;
208- console . log ( `Output: ${ step . observation } ` ) ;
188+ console . log ( 'Thinking...\n' ) ;
189+
190+ const eventStream = await agent . streamEvents (
191+ {
192+ messages : [ ...chatHistory , new HumanMessage ( question ) ] ,
193+ } ,
194+ { version : 'v2' } ,
195+ ) ;
196+
197+ let step = 0 ;
198+ for await ( const event of eventStream ) {
199+ const data = event . data ;
200+ if ( event . event === 'on_chat_model_stream' && data ?. chunk ?. content ?. length > 0 ) {
201+ const text = data . chunk . content [ 0 ] . text ;
202+ process . stdout . write ( text ) ;
203+ } else if ( event . event === 'on_tool_end' ) {
204+ if ( verbose ) {
205+ if ( step === 0 ) {
206+ console . log ( '--------------------' ) ;
207+ console . log ( 'Intermediate steps' ) ;
208+ console . log ( '--------------------' ) ;
209+ }
210+ step ++ ;
211+ console . log ( `*** Step ${ step } ***` ) ;
212+ console . log ( `Tool: ${ event . name } ` ) ;
213+ if ( data ?. input ?. input ) {
214+ console . log ( `Input:` , data . input . input ) ;
215+ }
216+ if ( data ?. output ?. content ) {
217+ console . log ( `Output:` , data . output . content ) ;
218+ }
219+ console . log ( '--------------------\n' ) ;
220+ }
221+ } else if (
222+ event . event === 'on_chain_end' &&
223+ event . name === 'RunnableSequence' &&
224+ data ?. output ?. content ?. length > 0
225+ ) {
226+ const finalContent = data . output . content [ 0 ] . text ;
227+ if ( finalContent ) {
228+ session . history . push ( { type : 'human' , content : question } ) ;
229+ session . history . push ( { type : 'ai' , content : data . output . content [ 0 ] . text } ) ;
230+ await saveSession ( session ) ;
231+ }
209232 }
210233 }
211-
212- console . log ( '--------------------\n' + response . output ) ;
213-
214- session . history . push ( { type : 'human' , content : question } ) ;
215- session . history . push ( { type : 'ai' , content : response . output } ) ;
216-
217- await saveSession ( session ) ;
218-
219234 } catch ( _error : unknown ) {
220235 const error = _error as Error ;
221236 console . error ( `Error when processing request: ${ error . message } ` ) ;
0 commit comments