Skip to content

Commit 51ecf1a

Browse files
authored
Add tavily examples (Azure-Samples#25)
1 parent ba0dc32 commit 51ecf1a

File tree

4 files changed

+159
-13
lines changed

4 files changed

+159
-13
lines changed

agents/agentframework_learn.py

Lines changed: 4 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,3 @@
1-
from __future__ import annotations
2-
31
import asyncio
42
import logging
53
import os
@@ -14,17 +12,14 @@
1412

1513
# Configure logging
1614
logging.basicConfig(level=logging.WARNING, format="%(message)s", datefmt="[%X]", handlers=[RichHandler()])
17-
logger = logging.getLogger("learn_mcp_lang")
15+
logger = logging.getLogger("agentframework_learn")
16+
logger.setLevel(logging.INFO)
1817

1918
# Load environment variables
2019
load_dotenv(override=True)
2120

22-
# Constants
23-
LEARN_MCP_URL = "https://learn.microsoft.com/api/mcp"
24-
2521
# Configure chat client based on API_HOST
2622
API_HOST = os.getenv("API_HOST", "github")
27-
2823
if API_HOST == "azure":
2924
client = AzureOpenAIChatClient(
3025
credential=DefaultAzureCredential(),
@@ -50,15 +45,13 @@
5045
)
5146

5247

53-
async def http_mcp_example() -> None:
48+
async def http_mcp_example():
5449
"""
55-
Demonstrate MCP integration with Microsoft Learn documentation.
56-
5750
Creates an agent that can answer questions about Microsoft documentation
5851
using the Microsoft Learn MCP server.
5952
"""
6053
async with (
61-
MCPStreamableHTTPTool(name="Microsoft Learn MCP", url=LEARN_MCP_URL) as mcp_server,
54+
MCPStreamableHTTPTool(name="Microsoft Learn MCP", url="https://learn.microsoft.com/api/mcp") as mcp_server,
6255
ChatAgent(
6356
chat_client=client,
6457
name="DocsAgent",

agents/agentframework_tavily.py

Lines changed: 69 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,69 @@
1+
import asyncio
import logging
import os

from agent_framework import ChatAgent, MCPStreamableHTTPTool
from agent_framework.azure import AzureOpenAIChatClient
from agent_framework.openai import OpenAIChatClient
from azure.identity import DefaultAzureCredential
from dotenv import load_dotenv
from rich import print  # intentionally shadows builtin print for rich-formatted output
from rich.logging import RichHandler

# Configure logging: libraries stay at WARNING, this script's logger at INFO.
logging.basicConfig(level=logging.WARNING, format="%(message)s", datefmt="[%X]", handlers=[RichHandler()])
logger = logging.getLogger("agentframework_tavily")
logger.setLevel(logging.INFO)

# Load environment variables from a local .env file, overriding any already set.
load_dotenv(override=True)

# Configure chat client based on API_HOST (azure | github | ollama | anything else -> OpenAI).
API_HOST = os.getenv("API_HOST", "github")
if API_HOST == "azure":
    # Azure OpenAI, authenticated with Azure AD rather than an API key.
    client = AzureOpenAIChatClient(
        credential=DefaultAzureCredential(),
        deployment_name=os.environ.get("AZURE_OPENAI_CHAT_DEPLOYMENT"),
        endpoint=os.environ.get("AZURE_OPENAI_ENDPOINT"),
        api_version=os.environ.get("AZURE_OPENAI_VERSION"),
    )
elif API_HOST == "github":
    # GitHub Models inference endpoint; authenticates with a GitHub token.
    client = OpenAIChatClient(
        base_url="https://models.github.ai/inference",
        api_key=os.environ["GITHUB_TOKEN"],
        model_id=os.getenv("GITHUB_MODEL", "openai/gpt-4o"),
    )
elif API_HOST == "ollama":
    # Local Ollama server speaking the OpenAI-compatible API.
    # NOTE(review): "none" looks like a placeholder key — presumably Ollama ignores it; confirm.
    client = OpenAIChatClient(
        base_url=os.environ.get("OLLAMA_ENDPOINT", "http://localhost:11434/v1"),
        api_key="none",
        model_id=os.environ.get("OLLAMA_MODEL", "llama3.1:latest"),
    )
else:
    # Default: OpenAI's hosted API.
    client = OpenAIChatClient(
        api_key=os.environ.get("OPENAI_API_KEY"), model_id=os.environ.get("OPENAI_MODEL", "gpt-4o")
    )
46+
47+
48+
async def http_mcp_example() -> None:
    """Run a web-search agent backed by the Tavily MCP server.

    Connects to Tavily's hosted Streamable-HTTP MCP endpoint (authenticated
    with the TAVILY_API_KEY environment variable), wires its tools into a
    ChatAgent, runs one sample query, and prints the result.

    Raises:
        KeyError: If TAVILY_API_KEY is not set in the environment.
    """
    tavily_key = os.environ["TAVILY_API_KEY"]
    # Tavily's MCP server authenticates via a bearer token header.
    headers = {"Authorization": f"Bearer {tavily_key}"}
    async with (
        MCPStreamableHTTPTool(name="Tavily MCP", url="https://mcp.tavily.com/mcp/", headers=headers) as mcp_server,
        ChatAgent(
            chat_client=client,
            name="WebSearchAgent",
            instructions="You search the web with Tavily and provide concise answers with links.",
        ) as agent,
    ):
        query = "What's new in Python 3.14? Include relevant links."
        # Pass the MCP tool set per-run so the agent can call Tavily's search tools.
        result = await agent.run(query, tools=mcp_server)
        print(result)
66+
67+
68+
if __name__ == "__main__":
    # Script entry point: drive the async example to completion.
    asyncio.run(http_mcp_example())

agents/langchainv1_http.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,8 @@
1414

1515
# Configure logging
1616
logging.basicConfig(level=logging.WARNING, format="%(message)s", datefmt="[%X]", handlers=[RichHandler()])
17-
logger = logging.getLogger("itinerario_lang")
17+
logger = logging.getLogger("langchainv1_http")
18+
logger.setLevel(logging.INFO)
1819

1920
# Load environment variables
2021
load_dotenv(override=True)
@@ -88,5 +89,4 @@ def main() -> None:
8889

8990

9091
if __name__ == "__main__":
91-
logger.setLevel(logging.INFO)
9292
main()

agents/langchainv1_tavily.py

Lines changed: 84 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,84 @@
1+
"""LangChain + Tavily MCP Example

Creates a simple research agent that uses the Tavily MCP server
to search the web and answer questions with relevant links.
"""

import asyncio
import logging
import os

import azure.identity
from dotenv import load_dotenv
from langchain.agents import create_agent
from langchain_core.messages import HumanMessage
from langchain_mcp_adapters.client import MultiServerMCPClient
from langchain_openai import ChatOpenAI
from pydantic import SecretStr
from rich.logging import RichHandler

# Configure logging: libraries stay at WARNING, this script's logger at INFO.
logging.basicConfig(level=logging.WARNING, format="%(message)s", datefmt="[%X]", handlers=[RichHandler()])
logger = logging.getLogger("langchainv1_tavily")
logger.setLevel(logging.INFO)

# Load environment variables from a local .env file, overriding any already set.
load_dotenv(override=True)

# Select the model backend based on API_HOST (azure | github | ollama | anything else -> OpenAI).
api_host = os.getenv("API_HOST", "github")

if api_host == "azure":
    # Azure AD token provider scoped to Cognitive Services.
    token_provider = azure.identity.get_bearer_token_provider(
        azure.identity.DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
    )
    # NOTE(review): api_key is being given the provider *callable*, not a token string —
    # ChatOpenAI normally expects a str/SecretStr. Confirm the /openai/v1/ endpoint and
    # client version accept a callable here, or call token_provider() instead.
    model = ChatOpenAI(
        model=os.environ.get("AZURE_OPENAI_CHAT_DEPLOYMENT"),
        base_url=os.environ["AZURE_OPENAI_ENDPOINT"] + "/openai/v1/",
        api_key=token_provider,
    )
elif api_host == "github":
    # NOTE(review): this base_url differs from the sibling agentframework example
    # ("https://models.github.ai/inference") — verify which GitHub Models endpoint is intended.
    model = ChatOpenAI(
        model=os.getenv("GITHUB_MODEL", "gpt-4o"),
        base_url="https://models.inference.ai.azure.com",
        api_key=SecretStr(os.environ["GITHUB_TOKEN"]),
    )
elif api_host == "ollama":
    # Local Ollama server speaking the OpenAI-compatible API; key is effectively unused.
    model = ChatOpenAI(
        model=os.environ.get("OLLAMA_MODEL", "llama3.1"),
        base_url=os.environ.get("OLLAMA_ENDPOINT", "http://localhost:11434/v1"),
        api_key=SecretStr(os.environ.get("OLLAMA_API_KEY", "none")),
    )
else:
    # Default: OpenAI's hosted API (reads OPENAI_API_KEY from the environment).
    model = ChatOpenAI(model=os.getenv("OPENAI_MODEL", "gpt-4o-mini"))
53+
54+
55+
async def run_agent() -> None:
    """Answer a sample research question with an agent whose tools come from Tavily's MCP server.

    Requires TAVILY_API_KEY in the environment; prints the agent's final answer.
    """
    api_key = os.environ["TAVILY_API_KEY"]
    server_config = {
        "tavily": {
            "url": "https://mcp.tavily.com/mcp/",
            "transport": "streamable_http",
            "headers": {"Authorization": f"Bearer {api_key}"},
        }
    }
    mcp_client = MultiServerMCPClient(server_config)

    # Discover the server's tools, then hand them to a LangChain agent.
    tavily_tools = await mcp_client.get_tools()
    research_agent = create_agent(
        model, tavily_tools, prompt="You search the web and include relevant links in answers."
    )

    question = "What's new in Python 3.14? Include relevant links."
    result = await research_agent.ainvoke({"messages": [HumanMessage(content=question)]})

    # The last message in the returned state is the agent's final answer.
    print(result["messages"][-1].content)
77+
78+
79+
def main() -> None:
    """Synchronous entry point: drive the async agent to completion."""
    asyncio.run(run_agent())


if __name__ == "__main__":
    main()

0 commit comments

Comments (0)