
Commit 617952c

Merge pull request #9 from pamelafox/infra

Add azd infrastructure for Container Apps, CosmosDB server, and Dockerfile

2 parents ea7fb18 + 9e3158f

29 files changed, with 1,973 additions and 220 deletions.

.devcontainer/devcontainer.json
Lines changed: 3 additions & 1 deletion

@@ -3,7 +3,9 @@
   "image": "mcr.microsoft.com/devcontainers/python:1-3.12-bullseye",
   "features": {
     "ghcr.io/va-h/devcontainers-features/uv:1": {},
-    "ghcr.io/devcontainers/features/node:1": { "version": "lts" }
+    "ghcr.io/devcontainers/features/node:1": { "version": "lts" },
+    "ghcr.io/devcontainers/features/docker-in-docker:2": {},
+    "ghcr.io/azure/azure-dev/azd:latest": {}
   },
   "postCreateCommand": "uv sync",
   "forwardPorts": [6277, 6274],

.github/workflows/python.yaml
Lines changed: 28 additions & 0 deletions

@@ -0,0 +1,28 @@
+name: Python code quality
+
+on:
+  push:
+    branches: [ main ]
+  pull_request:
+    branches: [ main ]
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v5
+      - name: Set up Python 3
+        uses: actions/setup-python@v6
+        with:
+          python-version: "3.13"
+      - name: Install uv
+        uses: astral-sh/setup-uv@v7
+      - name: Install dependencies
+        run: |
+          uv sync
+      - name: Lint with ruff
+        run: |
+          uv run ruff check .
+      - name: Check formatting with ruff
+        run: |
+          uv run ruff format . --check

.pre-commit-config.yaml
Lines changed: 15 additions & 0 deletions

@@ -0,0 +1,15 @@
+repos:
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.5.0
+    hooks:
+      - id: check-yaml
+      - id: end-of-file-fixer
+      - id: trailing-whitespace
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.14.7
+    hooks:
+      # Run the linter.
+      - id: ruff
+        args: [ --fix ]
+      # Run the formatter.
+      - id: ruff-format

agents/agentframework_http.py
Lines changed: 8 additions & 17 deletions

@@ -4,29 +4,23 @@
 import logging
 import os
 
+from agent_framework import ChatAgent, MCPStreamableHTTPTool
+from agent_framework.azure import AzureOpenAIChatClient
+from agent_framework.openai import OpenAIChatClient
 from azure.identity import DefaultAzureCredential
 from dotenv import load_dotenv
 from rich import print
 from rich.logging import RichHandler
 
-from agent_framework import ChatAgent, MCPStreamableHTTPTool
-from agent_framework.azure import AzureOpenAIChatClient
-from agent_framework.openai import OpenAIChatClient
-
 # Configure logging
-logging.basicConfig(
-    level=logging.WARNING,
-    format="%(message)s",
-    datefmt="[%X]",
-    handlers=[RichHandler()]
-)
+logging.basicConfig(level=logging.WARNING, format="%(message)s", datefmt="[%X]", handlers=[RichHandler()])
 logger = logging.getLogger("agentframework_mcp_http")
 
 # Load environment variables
 load_dotenv(override=True)
 
 # Constants
-MCP_SERVER_URL = "http://localhost:8000/mcp/"
+MCP_SERVER_URL = os.getenv("MCP_SERVER_URL", "http://localhost:8000/mcp/")
 
 # Configure chat client based on API_HOST
 API_HOST = os.getenv("API_HOST", "github")
@@ -59,15 +53,12 @@
 async def http_mcp_example() -> None:
     """
     Demonstrate MCP integration with the local Expenses MCP server.
-
+
     Creates an agent that can help users log expenses
     using the Expenses MCP server at http://localhost:8000/mcp/.
     """
     async with (
-        MCPStreamableHTTPTool(
-            name="Expenses MCP Server",
-            url=MCP_SERVER_URL
-        ) as mcp_server,
+        MCPStreamableHTTPTool(name="Expenses MCP Server", url=MCP_SERVER_URL) as mcp_server,
         ChatAgent(
             chat_client=client,
             name="Expenses Agent",
@@ -80,4 +71,4 @@ async def http_mcp_example() -> None:
 
 
 if __name__ == "__main__":
-    asyncio.run(http_mcp_example())
+    asyncio.run(http_mcp_example())
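The MCP_SERVER_URL change above is what lets the same agent script target either the local dev server or the Container App endpoint this PR provisions. Below is a minimal sketch of that pattern, under the assumption that the azd postprovision hook ends up exporting MCP_SERVER_URL into a local .env; only the MCPStreamableHTTPTool constructor and context-manager usage already shown in the diff are relied on.

```python
import asyncio
import os

from agent_framework import MCPStreamableHTTPTool
from dotenv import load_dotenv

# Assumption: ./infra/write_env.sh (or write_env.ps1) leaves MCP_SERVER_URL in
# .env after provisioning; otherwise the localhost default below is used.
load_dotenv(override=True)
MCP_SERVER_URL = os.getenv("MCP_SERVER_URL", "http://localhost:8000/mcp/")


async def main() -> None:
    # Open a streamable HTTP MCP session against whichever endpoint is configured,
    # exactly as the agent scripts in this PR do inside their `async with` blocks.
    async with MCPStreamableHTTPTool(name="Expenses MCP Server", url=MCP_SERVER_URL):
        print(f"MCP session opened against {MCP_SERVER_URL}")


if __name__ == "__main__":
    asyncio.run(main())
```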

agents/agentframework_learn.py
Lines changed: 6 additions & 12 deletions

@@ -4,22 +4,16 @@
 import logging
 import os
 
+from agent_framework import ChatAgent, MCPStreamableHTTPTool
+from agent_framework.azure import AzureOpenAIChatClient
+from agent_framework.openai import OpenAIChatClient
 from azure.identity import DefaultAzureCredential
 from dotenv import load_dotenv
 from rich import print
 from rich.logging import RichHandler
 
-from agent_framework import ChatAgent, MCPStreamableHTTPTool
-from agent_framework.azure import AzureOpenAIChatClient
-from agent_framework.openai import OpenAIChatClient
-
 # Configure logging
-logging.basicConfig(
-    level=logging.WARNING,
-    format="%(message)s",
-    datefmt="[%X]",
-    handlers=[RichHandler()]
-)
+logging.basicConfig(level=logging.WARNING, format="%(message)s", datefmt="[%X]", handlers=[RichHandler()])
 logger = logging.getLogger("learn_mcp_lang")
 
 # Load environment variables
@@ -59,7 +53,7 @@
 async def http_mcp_example() -> None:
     """
     Demonstrate MCP integration with Microsoft Learn documentation.
-
+
     Creates an agent that can answer questions about Microsoft documentation
     using the Microsoft Learn MCP server.
     """
@@ -81,4 +75,4 @@ async def http_mcp_example() -> None:
 
 
 if __name__ == "__main__":
-    asyncio.run(http_mcp_example())
+    asyncio.run(http_mcp_example())

agents/langchainv1_github.py
Lines changed: 30 additions & 33 deletions

@@ -6,16 +6,17 @@
 
 import asyncio
 import os
-from dotenv import load_dotenv
-from rich import print as rprint
-from rich.panel import Panel
-from rich.console import Console
+
 import azure.identity
+from dotenv import load_dotenv
 from langchain.agents import create_agent
 from langchain_core.messages import HumanMessage
 from langchain_mcp_adapters.client import MultiServerMCPClient
 from langchain_openai import ChatOpenAI
 from pydantic import SecretStr
+from rich import print as rprint
+from rich.console import Console
+from rich.panel import Panel
 
 load_dotenv(override=True)
 
@@ -50,57 +51,53 @@
 async def main():
     """Create a safe research agent with filtered read-only tools"""
     console.print("\n[bold white on blue] LangChain Tool Filtering Demo [/bold white on blue]\n")
-
-    console.print(Panel.fit(
-        "[bold cyan]GitHub Research Agent (Read-Only)[/bold cyan]\n"
-        "Filtered to only safe search tools",
-        border_style="cyan"
-    ))
-
-    mcp_client = MultiServerMCPClient({
-        "github": {
-            "url": "https://api.githubcopilot.com/mcp/",
-            "transport": "streamable_http",
-            "headers": {"Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}"},
+
+    console.print(
+        Panel.fit(
+            "[bold cyan]GitHub Research Agent (Read-Only)[/bold cyan]\nFiltered to only safe search tools",
+            border_style="cyan",
+        )
+    )
+
+    mcp_client = MultiServerMCPClient(
+        {
+            "github": {
+                "url": "https://api.githubcopilot.com/mcp/",
+                "transport": "streamable_http",
+                "headers": {"Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}"},
+            }
         }
-    })
-
+    )
+
     # Get all tools and show what we're filtering out
     all_tools = await mcp_client.get_tools()
-
+
     console.print(f"[dim]Total tools available: {len(all_tools)}[/dim]\n")
-
+
     # Filter to ONLY read operations
-    safe_tool_names = ['search_repositories', 'search_code', 'search_issues']
+    safe_tool_names = ["search_repositories", "search_code", "search_issues"]
    filtered_tools = [t for t in all_tools if t.name in safe_tool_names]
-
+
     console.print("[bold cyan]Filtered Tools (read-only):[/bold cyan]")
     for tool in filtered_tools:
         console.print(f"  ✓ {tool.name}")
-
-    # Show what was filtered out
-    blocked_tools = [t for t in all_tools if 'create' in t.name or 'update' in t.name or 'fork' in t.name]
-    if blocked_tools:
-        console.print(f"\n[dim]Blocked tools ({len(blocked_tools)}): " + ", ".join([t.name for t in blocked_tools[:5]]) + "...[/dim]")
-
     console.print()
-
+
     # Create agent with filtered tools
     agent = create_agent(
         model,
         tools=filtered_tools,
-        prompt="You help users research GitHub repositories. Search and analyze information."
+        prompt="You help users research GitHub repositories. Search and analyze information.",
     )
-
+
     query = "Find popular Python MCP server repositories"
     rprint(f"[bold]Query:[/bold] {query}\n")
-
+
     try:
         result = await agent.ainvoke({"messages": [HumanMessage(content=query)]})
         rprint(f"[bold green]Result:[/bold green]\n{result['messages'][-1].content}\n")
     except Exception as e:
         rprint(f"[bold red]Error:[/bold red] {str(e)}\n")
-
 
 
 if __name__ == "__main__":
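The read-only behaviour of this script comes entirely from the allowlist comprehension above. Here is a small standalone sketch of that check, assuming only that MCP tool objects expose a `name` attribute as they do in the diff; `ToolLike` and `filter_tools` are illustrative names, not part of langchain or langchain-mcp-adapters.

```python
# Allowlist filtering pulled out into a reusable, testable helper.
from typing import Iterable, Protocol


class ToolLike(Protocol):
    # The only attribute the filter relies on.
    name: str


def filter_tools(tools: Iterable[ToolLike], allowed: set[str]) -> list[ToolLike]:
    """Keep only tools whose name is explicitly allowlisted (read-only by convention)."""
    return [t for t in tools if t.name in allowed]


if __name__ == "__main__":
    from types import SimpleNamespace

    demo = [SimpleNamespace(name=n) for n in ("search_code", "create_issue", "search_issues")]
    kept = filter_tools(demo, {"search_repositories", "search_code", "search_issues"})
    print([t.name for t in kept])  # -> ['search_code', 'search_issues']
```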

agents/langchainv1_http.py
Lines changed: 7 additions & 17 deletions

@@ -13,28 +13,21 @@
 from rich.logging import RichHandler
 
 # Configure logging
-logging.basicConfig(
-    level=logging.WARNING,
-    format="%(message)s",
-    datefmt="[%X]",
-    handlers=[RichHandler()]
-)
+logging.basicConfig(level=logging.WARNING, format="%(message)s", datefmt="[%X]", handlers=[RichHandler()])
 logger = logging.getLogger("itinerario_lang")
 
 # Load environment variables
 load_dotenv(override=True)
 
 # Constants
-MCP_SERVER_URL = "http://localhost:8000/mcp/"
-AZURE_COGNITIVE_SERVICES_SCOPE = "https://cognitiveservices.azure.com/.default"
+MCP_SERVER_URL = os.getenv("MCP_SERVER_URL", "http://localhost:8000/mcp/")
 
 # Configure language model based on API_HOST
 API_HOST = os.getenv("API_HOST", "github")
 
 if API_HOST == "azure":
     token_provider = azure.identity.get_bearer_token_provider(
-        azure.identity.DefaultAzureCredential(),
-        AZURE_COGNITIVE_SERVICES_SCOPE
+        azure.identity.DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
     )
     base_model = ChatOpenAI(
         model=os.environ.get("AZURE_OPENAI_CHAT_DEPLOYMENT"),
@@ -80,13 +73,10 @@ async def run_agent() -> None:
     user_query = "yesterday I bought a laptop for $1200 using my visa."
 
     # Invoke agent
-    response = await agent.ainvoke({
-        "messages": [
-            SystemMessage(content=f"Today's date is {today}."),
-            HumanMessage(content=user_query)
-        ]
-    })
-
+    response = await agent.ainvoke(
+        {"messages": [SystemMessage(content=f"Today's date is {today}."), HumanMessage(content=user_query)]}
+    )
+
     # Display result
     final_response = response["messages"][-1].content
     print(final_response)

azure.yaml
Lines changed: 24 additions & 0 deletions

@@ -0,0 +1,24 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/Azure/azure-dev/main/schemas/v1.0/azure.yaml.json
+
+name: python-mcp-demo
+metadata:
+
+services:
+  # Not using remoteBuild due to private endpoint usage
+  aca:
+    project: .
+    language: docker
+    host: containerapp
+    docker:
+      path: ./servers/Dockerfile
+      context: .
+hooks:
+  postprovision:
+    posix:
+      shell: sh
+      run: ./infra/write_env.sh
+      continueOnError: true
+    windows:
+      shell: pwsh
+      run: ./infra/write_env.ps1
+      continueOnError: true
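The postprovision hooks above presumably write the deployed endpoint (for example MCP_SERVER_URL) into a local env file, which is what the localhost fallbacks added to the agent scripts pick up. A rough reachability check for that endpoint, standard library only; the env-file mechanics and the server's response to a bare GET are assumptions, not taken from this PR.

```python
import os
import urllib.error
import urllib.request

# Falls back to the local dev server when nothing was provisioned/exported.
MCP_SERVER_URL = os.getenv("MCP_SERVER_URL", "http://localhost:8000/mcp/")

try:
    with urllib.request.urlopen(MCP_SERVER_URL, timeout=10) as resp:
        print(f"{MCP_SERVER_URL} -> HTTP {resp.status}")
except urllib.error.HTTPError as err:
    # Streamable HTTP MCP endpoints often reject bare GETs; any HTTP status
    # still shows the Container App ingress is reachable.
    print(f"{MCP_SERVER_URL} -> HTTP {err.code} (reachable)")
except urllib.error.URLError as err:
    print(f"{MCP_SERVER_URL} not reachable: {err.reason}")
```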
