Commit 1e2f11e

Merge pull request #18 from Azure-Samples/pamelaschangesforsomereason
Add better output for GitHub MCP example
2 parents 615bb5a + ce0891f commit 1e2f11e

1 file changed: +22 -2 lines changed


examples/langgraph_mcp_github.py

Lines changed: 22 additions & 2 deletions
@@ -1,11 +1,11 @@
 import os

 import azure.identity
+import rich
 from dotenv import load_dotenv
 from langchain_mcp_adapters.client import MultiServerMCPClient
 from langchain_openai import AzureChatOpenAI, ChatOpenAI
 from langgraph.prebuilt import create_react_agent
-from rich import print

 # Setup the client to use either Azure OpenAI or GitHub Models
 load_dotenv(override=True)
@@ -41,8 +41,28 @@ async def setup_agent():
     stale_prompt_path = os.path.join(os.path.dirname(__file__), "staleprompt.md")
     with open(stale_prompt_path) as f:
         stale_prompt = f.read()
+    final_text = ""
     async for event in agent.astream_events({"messages": stale_prompt + " Find one issue from Azure-samples azure-search-openai-demo that is potentially closeable."}, version="v2"):
-        print(event)
+        kind = event["event"]
+        if kind == "on_chat_model_stream":
+            # The event corresponding to a stream of new content (tokens or chunks of text)
+            if chunk := event.get("data", {}).get("chunk"):
+                final_text += chunk.content  # Append the new content to the accumulated text
+
+        elif kind == "on_tool_start":
+            # The event signals that a tool is about to be called
+            rich.print("Called ", event["name"])  # Show which tool is being called
+            rich.print("Tool input: ")
+            rich.print(event["data"].get("input"))  # Display the input data sent to the tool
+
+        elif kind == "on_tool_end":
+            if output := event["data"].get("output"):
+                # The event signals that a tool has finished executing
+                rich.print("Tool output: ")
+                rich.print(output.content)
+
+    rich.print("Final response:")
+    rich.print(final_text)


 if __name__ == "__main__":
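
For reference, the event-handling loop introduced in this diff generalizes to any LangChain runnable that supports astream_events with version="v2". Below is a minimal sketch under that assumption; the helper name stream_agent_events is illustrative and not part of the sample, and the agent is assumed to be the runnable built by create_react_agent in the sample's setup_agent().

import rich


async def stream_agent_events(agent, messages: str) -> str:
    """Stream one agent run and pretty-print model tokens and tool activity.

    A sketch mirroring the loop added in this commit; "agent" is assumed to be
    the runnable returned by create_react_agent (not shown here).
    """
    final_text = ""
    async for event in agent.astream_events({"messages": messages}, version="v2"):
        kind = event["event"]
        if kind == "on_chat_model_stream":
            # A new chunk of streamed model output (tokens or text)
            if chunk := event.get("data", {}).get("chunk"):
                final_text += chunk.content
        elif kind == "on_tool_start":
            # A tool is about to be invoked; show its name and input
            rich.print("Called ", event["name"])
            rich.print("Tool input: ")
            rich.print(event["data"].get("input"))
        elif kind == "on_tool_end":
            # A tool finished; show what it returned
            if output := event["data"].get("output"):
                rich.print("Tool output: ")
                rich.print(output.content)
    rich.print("Final response:")
    rich.print(final_text)
    return final_text

In the sample the equivalent loop is inlined in setup_agent(); a helper like this would be awaited there, or driven with asyncio.run() from a synchronous entry point once the agent has been constructed.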
