Skip to content

Commit 17cd991

Browse files
authored
Merge pull request #16 from Azure-Samples/pamelaschangesforsomereason
Agents + MCP examples from live stream
2 parents 4ad5afe + de42112 commit 17cd991

19 files changed

+498
-11
lines changed

.vscode/launch.json

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
{
2+
"version": "0.2.0",
3+
"configurations": [
4+
{
5+
"name": "Python: Current File",
6+
"type": "debugpy",
7+
"request": "launch",
8+
"program": "${file}",
9+
"console": "integratedTerminal",
10+
"justMyCode": false
11+
}
12+
]
13+
}

example_data/plants.pdf

362 KB
Binary file not shown.

examples/azureai_azureopenai.py

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
import logging
import os

import azure.identity
from azure.ai.inference import ChatCompletionsClient
from azure.ai.inference.models import SystemMessage, UserMessage
from dotenv import load_dotenv

# DEBUG-level logging so the underlying HTTP traffic of the SDK is visible.
logging.basicConfig(level=logging.DEBUG)
load_dotenv(override=True)

# Point the Azure AI Inference client directly at the Azure OpenAI deployment URL,
# authenticating keylessly via the Azure Developer CLI credential.
deployment_endpoint = "{}/openai/deployments/{}".format(
    os.environ["AZURE_OPENAI_ENDPOINT"], os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"]
)
client = ChatCompletionsClient(
    endpoint=deployment_endpoint,
    credential=azure.identity.AzureDeveloperCliCredential(tenant_id=os.getenv("AZURE_TENANT_ID")),
    credential_scopes=["https://cognitiveservices.azure.com/.default"],
    api_version=os.environ["AZURE_OPENAI_VERSION"],
)

# One-shot chat completion: system prompt + a single user question.
conversation = [
    SystemMessage(content="You are a helpful assistant."),
    UserMessage(content="What is the capital of France?"),
]
response = client.complete(messages=conversation, model=os.environ["AZURE_OPENAI_CHAT_MODEL"])
print(response.choices[0].message.content)
8.6 KB
Loading
Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
2+
digraph G {
3+
graph [splines=true];
4+
node [fontname="Arial"];
5+
edge [penwidth=1.5];
6+
"__start__" [label="__start__", shape=ellipse, style=filled, fillcolor=lightblue, width=0.5, height=0.3];"__end__" [label="__end__", shape=ellipse, style=filled, fillcolor=lightblue, width=0.5, height=0.3];"Triage agent" [label="Triage agent", shape=box, style=filled, fillcolor=lightyellow, width=1.5, height=0.8];"Spanish agent" [label="Spanish agent", shape=box, style=filled, style=rounded, fillcolor=lightyellow, width=1.5, height=0.8];"__start__" [label="__start__", shape=ellipse, style=filled, fillcolor=lightblue, width=0.5, height=0.3];"__end__" [label="__end__", shape=ellipse, style=filled, fillcolor=lightblue, width=0.5, height=0.3];"Spanish agent" [label="Spanish agent", shape=box, style=filled, fillcolor=lightyellow, width=1.5, height=0.8];"get_weather" [label="get_weather", shape=ellipse, style=filled, fillcolor=lightgreen, width=0.5, height=0.3];"English agent" [label="English agent", shape=box, style=filled, style=rounded, fillcolor=lightyellow, width=1.5, height=0.8];"__start__" [label="__start__", shape=ellipse, style=filled, fillcolor=lightblue, width=0.5, height=0.3];"__end__" [label="__end__", shape=ellipse, style=filled, fillcolor=lightblue, width=0.5, height=0.3];"English agent" [label="English agent", shape=box, style=filled, fillcolor=lightyellow, width=1.5, height=0.8];"get_weather" [label="get_weather", shape=ellipse, style=filled, fillcolor=lightgreen, width=0.5, height=0.3];"__start__" -> "Triage agent";
7+
"Triage agent" -> "Spanish agent";
8+
"Spanish agent" -> "get_weather" [style=dotted, penwidth=1.5];
9+
"get_weather" -> "Spanish agent" [style=dotted, penwidth=1.5];"Spanish agent" -> "__end__";
10+
"Triage agent" -> "English agent";
11+
"English agent" -> "get_weather" [style=dotted, penwidth=1.5];
12+
"get_weather" -> "English agent" [style=dotted, penwidth=1.5];"English agent" -> "__end__";}
32.5 KB
Loading

examples/langgraph_mcp_http.py

Lines changed: 49 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,49 @@
# Example: a LangGraph ReAct agent whose tools come from a local MCP server.
# Based on:
# https://github.com/JRAlexander/IntroToAgents1-Oxford/blob/main/intro-langgraph/time-travel.ipynb

import os

import azure.identity
from dotenv import load_dotenv
from langchain_mcp_adapters.client import MultiServerMCPClient
from langchain_openai import AzureChatOpenAI, ChatOpenAI
from langgraph.prebuilt import create_react_agent  # REACT

# Setup the client to use either Azure OpenAI or GitHub Models
load_dotenv(override=True)
API_HOST = os.getenv("API_HOST", "github")

if API_HOST == "azure":
    # Keyless auth: exchange the developer's Azure credential for bearer tokens on demand.
    token_provider = azure.identity.get_bearer_token_provider(azure.identity.DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default")
    model = AzureChatOpenAI(
        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
        azure_deployment=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
        openai_api_version=os.environ["AZURE_OPENAI_VERSION"],
        azure_ad_token_provider=token_provider,
    )
else:
    # GitHub Models endpoint, authenticated with a GitHub token.
    model = ChatOpenAI(model=os.getenv("GITHUB_MODEL", "gpt-4o"), base_url="https://models.inference.ai.azure.com", api_key=os.environ["GITHUB_TOKEN"])


async def setup_agent():
    """Connect to the local MCP server, build a ReAct agent over its tools, and run one hotel query."""
    client = MultiServerMCPClient(
        {
            "itinerary": {
                # Make sure you start your itinerary server on port 8000
                "url": "http://localhost:8000/mcp/",
                "transport": "streamable_http",
            }
        }
    )

    # Discover the tools the MCP server exposes and hand them to the agent.
    tools = await client.get_tools()
    agent = create_react_agent(model, tools)
    hotel_response = await agent.ainvoke({"messages": "Find me a hotel in San Francisco for 2 nights starting from 2024-01-01. I need a hotel with free WiFi and a pool."})
    # The last message in the returned state holds the agent's final answer.
    print(hotel_response["messages"][-1].content)


if __name__ == "__main__":
    import asyncio
    import logging

    logging.basicConfig(level=logging.WARNING)
    asyncio.run(setup_agent())
Lines changed: 64 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,64 @@
# Example: a hand-wired LangGraph tool-calling loop whose tools come from a
# local MCP server, plus a PNG rendering of the compiled graph.

import os

import azure.identity
from dotenv import load_dotenv
from langchain_mcp_adapters.client import MultiServerMCPClient
from langchain_openai import AzureChatOpenAI, ChatOpenAI
from langgraph.graph import START, MessagesState, StateGraph
from langgraph.prebuilt import ToolNode, tools_condition

# Setup the client to use either Azure OpenAI or GitHub Models
load_dotenv(override=True)
API_HOST = os.getenv("API_HOST", "github")

if API_HOST == "azure":
    # Keyless auth: fetch Entra ID bearer tokens on demand instead of an API key.
    token_provider = azure.identity.get_bearer_token_provider(azure.identity.DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default")
    model = AzureChatOpenAI(
        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
        azure_deployment=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
        openai_api_version=os.environ["AZURE_OPENAI_VERSION"],
        azure_ad_token_provider=token_provider,
    )
else:
    # GitHub Models endpoint, authenticated with a GitHub token.
    model = ChatOpenAI(model=os.getenv("GITHUB_MODEL", "gpt-4o"), base_url="https://models.inference.ai.azure.com", api_key=os.environ["GITHUB_TOKEN"])


async def setup_agent():
    """Build a StateGraph tool-calling loop over the MCP server's tools, run one query, and save a graph PNG."""
    client = MultiServerMCPClient(
        {
            "weather": {
                # make sure you start your weather server on port 8000
                "url": "http://localhost:8000/mcp/",
                "transport": "streamable_http",
            }
        }
    )
    tools = await client.get_tools()

    def call_model(state: MessagesState):
        # One LLM step: the model either answers or requests one of the bound tools.
        response = model.bind_tools(tools).invoke(state["messages"])
        return {"messages": response}

    # Graph shape: START -> call_model -> (tools -> call_model)* -> END.
    # NOTE: add_node derives node names from the function/class
    # ("call_model", "tools"), so the string edges below must match —
    # renaming call_model would silently break the wiring.
    builder = StateGraph(MessagesState)
    builder.add_node(call_model)
    builder.add_node(ToolNode(tools))
    builder.add_edge(START, "call_model")
    builder.add_conditional_edges(
        "call_model",
        tools_condition,  # routes to "tools" when a tool call was requested, else to END
    )
    builder.add_edge("tools", "call_model")
    graph = builder.compile()
    hotel_response = await graph.ainvoke({"messages": "Find me a hotel in San Francisco for 2 nights starting from 2024-01-01. I need a hotel with free WiFi and a pool."})
    # The last message in the returned state holds the final answer.
    print(hotel_response["messages"][-1].content)
    # Render the compiled graph to PNG (assumes examples/images/ exists — TODO confirm).
    image_bytes = graph.get_graph().draw_mermaid_png()
    with open("examples/images/langgraph_mcp_http_graph.png", "wb") as f:
        f.write(image_bytes)


if __name__ == "__main__":
    import asyncio
    import logging

    logging.basicConfig(level=logging.WARNING)
    asyncio.run(setup_agent())

examples/mcp_server_basic.py

Lines changed: 129 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,129 @@
1+
import random
2+
import re
3+
from dataclasses import dataclass
4+
from datetime import datetime
5+
from typing import Annotated
6+
7+
from faker import Faker
8+
from mcp.server.fastmcp import FastMCP
9+
from pydantic import Field
10+
11+
app = FastMCP()
12+
fake = Faker()
13+
14+
15+
@dataclass
class Hotel:
    """A single mock hotel suggestion produced by the suggest_hotels tool."""

    name: str  # display name, e.g. "Boutique Inn"
    address: str  # street address (Faker-generated)
    location: str  # "<neighborhood>, <city>"
    rating: float  # 3.0-5.0, one decimal place
    price_per_night: float  # nightly rate derived from hotel_type
    hotel_type: str  # e.g. "Luxury", "Boutique", "Budget", "Business"
    amenities: list[str]  # subset of the amenity pool (3-6 entries)
    available_rooms: int  # 1-15
25+
26+
27+
@dataclass
class HotelSuggestions:
    """Result wrapper for suggest_hotels: hotels sorted best-rating-first."""

    hotels: list[Hotel]
30+
31+
32+
def validate_iso_date(date_str: str, param_name: str):
    """
    Validate that a string is a strict ISO-format date (YYYY-MM-DD) and return it parsed.

    Args:
        date_str: The date string to validate
        param_name: Name of the parameter for error messages

    Returns:
        The parsed date object

    Raises:
        ValueError: If the string is not zero-padded YYYY-MM-DD, or is not
            a real calendar date (e.g. "2024-02-30")
    """
    # strptime alone would accept non-zero-padded input like "2024-1-1",
    # so enforce the strict YYYY-MM-DD shape first.
    iso_pattern = re.compile(r"^\d{4}-\d{2}-\d{2}$")
    if not iso_pattern.match(date_str):
        raise ValueError(f"{param_name} must be in ISO format (YYYY-MM-DD), got: {date_str}")

    try:
        return datetime.strptime(date_str, "%Y-%m-%d").date()
    except ValueError as e:
        # Chain the original error so the root cause stays on the traceback (PEP 3134).
        raise ValueError(f"Invalid {param_name}: {e}") from e
54+
55+
56+
@app.tool()
async def suggest_hotels(
    location: Annotated[str, Field(description="Location (city or area) to search for hotels")],
    check_in: Annotated[str, Field(description="Check-in date in ISO format (YYYY-MM-DD)")],
    check_out: Annotated[str, Field(description="Check-out date in ISO format (YYYY-MM-DD)")],
) -> HotelSuggestions:
    """
    Suggest hotels based on location and dates.

    Produces 3-8 randomly generated mock hotels for the given location,
    sorted best-rating-first.

    Raises:
        ValueError: If a date is not valid ISO format (YYYY-MM-DD), or if
            check_out is not strictly after check_in.
    """
    # Validate dates (each call raises ValueError on malformed input)
    check_in_date = validate_iso_date(check_in, "check_in")
    check_out_date = validate_iso_date(check_out, "check_out")

    # Ensure check_out is after check_in
    if check_out_date <= check_in_date:
        raise ValueError("check_out date must be after check_in date")

    # Pools used to create realistic mock data for hotels
    hotel_types = ["Luxury", "Boutique", "Budget", "Business"]
    amenities = ["Free WiFi", "Pool", "Spa", "Gym", "Restaurant", "Bar", "Room Service", "Parking"]

    # Generate a rating between 3.0 and 5.0 (one decimal place)
    def generate_rating():
        return round(random.uniform(3.0, 5.0), 1)

    # Generate a nightly price based on hotel type
    def generate_price(hotel_type):
        # NOTE(review): "Resort" never appears in hotel_types above, so that
        # band is currently unreachable; kept so the table stays complete if
        # the type pool grows. Unknown types fall back to (100, 300).
        price_ranges = {
            "Luxury": (250, 600),
            "Boutique": (180, 350),
            "Budget": (80, 150),
            "Resort": (200, 500),
            "Business": (150, 300),
        }
        min_price, max_price = price_ranges.get(hotel_type, (100, 300))
        return round(random.uniform(min_price, max_price))

    # Generate between 3 and 8 hotels
    num_hotels = random.randint(3, 8)
    hotels = []

    neighborhoods = [
        "Downtown",
        "Historic District",
        "Waterfront",
        "Business District",
        "Arts District",
        "University Area",
    ]

    for _ in range(num_hotels):  # index was unused, so "_"
        hotel_type = random.choice(hotel_types)
        hotel_amenities = random.sample(amenities, random.randint(3, 6))
        neighborhood = random.choice(neighborhoods)

        hotel = Hotel(
            # random.choice is the idiomatic form of [...][random.randint(0, 4)]
            name=f"{hotel_type} {random.choice(['Hotel', 'Inn', 'Suites', 'Resort', 'Plaza'])}",
            address=fake.street_address(),
            location=f"{neighborhood}, {location}",
            rating=generate_rating(),
            price_per_night=generate_price(hotel_type),
            hotel_type=hotel_type,
            amenities=hotel_amenities,
            available_rooms=random.randint(1, 15),
        )
        hotels.append(hotel)

    # Sort by rating to show best hotels first
    hotels.sort(key=lambda x: x.rating, reverse=True)
    return HotelSuggestions(hotels=hotels)
126+
127+
128+
if __name__ == "__main__":
129+
app.run(transport="streamable-http")

examples/openai_agents_basic.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,13 @@
11
import asyncio
2+
import logging
23
import os
34

45
import azure.identity
56
import openai
67
from agents import Agent, OpenAIChatCompletionsModel, Runner, set_tracing_disabled
78
from dotenv import load_dotenv
89

10+
logging.basicConfig(level=logging.WARNING)
911
# Disable tracing since we're not connected to a supported tracing provider
1012
set_tracing_disabled(disabled=True)
1113

@@ -23,7 +25,9 @@
2325
azure_ad_token_provider=token_provider,
2426
)
2527
MODEL_NAME = os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"]
26-
28+
elif API_HOST == "ollama":
29+
client = openai.AsyncOpenAI(base_url="http://localhost:11434/v1", api_key="none")
30+
MODEL_NAME = "llama3.1:latest"
2731

2832
agent = Agent(
2933
name="Spanish tutor",

0 commit comments

Comments
 (0)