Skip to content

Commit 857feb9

Browse files
committed
Merge branch 'main' of https://github.com/MervinPraison/PraisonAI into pr-812-handoff-fix
2 parents 53558fb + d2a044d commit 857feb9

File tree

22 files changed

+1173
-95
lines changed

22 files changed

+1173
-95
lines changed
Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
# Auto Issue Comment
# Posts an automated triage comment on every newly opened issue,
# instructing the @claude bot to analyse and (if needed) fix it.
name: Auto Issue Comment

on:
  issues:
    types: [opened]

jobs:
  add-comment:
    runs-on: ubuntu-latest
    permissions:
      issues: write  # minimum permission needed to create the comment
    steps:
      - name: Add automated comment
        uses: actions/github-script@v7
        with:
          # GH_TOKEN (a PAT) is used instead of the default GITHUB_TOKEN so the
          # resulting comment can trigger downstream bot workflows.
          github-token: ${{ secrets.GH_TOKEN }}
          script: |
            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: '@claude review this issue and do a detailed analysis and fix this if the existing code doesn\'t have the solution implemented. Making sure it has backward compatibility, no existing features removed. After making those changes, again use multi agents to review the applied changes. Use @web to search if you dont know any information or to find the latest documentation or to find the latest version. Run the code if you think you need to run it to test it. Minimal code change to start with if required any changes.'
            })
Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
# Auto PR Comment
# Posts an automated review-request comment on every newly opened pull
# request, instructing the @claude bot to analyse and (if needed) fix it.
name: Auto PR Comment

on:
  pull_request:
    types: [opened]

jobs:
  add-comment:
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write  # minimum permission needed to comment on PRs
    steps:
      - name: Add automated comment
        uses: actions/github-script@v7
        with:
          # GH_TOKEN (a PAT) is used instead of the default GITHUB_TOKEN so the
          # resulting comment can trigger downstream bot workflows.
          github-token: ${{ secrets.GH_TOKEN }}
          script: |
            // PR comments use the issues API; context.issue.number is the PR number here.
            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: '@claude review this pull request and do a detailed analysis and fix this if the existing code doesn\'t have the solution implemented. Making sure it has backward compatibility, no existing features removed. After making those changes, again use multi agents to review the applied changes. Use @web to search if you dont know any information or to find the latest documentation or to find the latest version. Run the code if you think you need to run it to test it. Minimal code change to start with if required any changes.'
            })

.github/workflows/test-core.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -151,7 +151,7 @@ jobs:
151151
return 'INVALID_TYPE'
152152
153153
print('🔧 Direct PraisonAI API Key Check:')
154-
env_api_key = os.environ.get(\\"OPENAI_API_KEY\\", \\"NOT_SET\\")
154+
env_api_key = os.environ.get(\"OPENAI_API_KEY\", \"NOT_SET\")
155155
print(f'Environment OPENAI_API_KEY: {get_key_display_value(env_api_key)}...')
156156
157157
from praisonai import PraisonAI
@@ -163,7 +163,7 @@ jobs:
163163
164164
from praisonai.inc.models import PraisonAIModel
165165
166-
print('\\\\n🧪 Testing PraisonAIModel with explicit API key (CrewAI method):')
166+
print('\\n🧪 Testing PraisonAIModel with explicit API key (CrewAI method):')
167167
model_with_explicit_key = PraisonAIModel(
168168
model='openai/gpt-4o-mini',
169169
base_url=praisonai.config_list[0].get('base_url'),

docker/Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ RUN mkdir -p /root/.praison
1616
# Install Python packages (using latest versions)
1717
RUN pip install --no-cache-dir \
1818
flask \
19-
"praisonai>=2.2.47" \
19+
"praisonai>=2.2.49" \
2020
"praisonai[api]" \
2121
gunicorn \
2222
markdown

docker/Dockerfile.chat

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ RUN mkdir -p /root/.praison
1616
# Install Python packages (using latest versions)
1717
RUN pip install --no-cache-dir \
1818
praisonai_tools \
19-
"praisonai>=2.2.47" \
19+
"praisonai>=2.2.49" \
2020
"praisonai[chat]" \
2121
"embedchain[github,youtube]"
2222

docker/Dockerfile.dev

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ RUN mkdir -p /root/.praison
2020
# Install Python packages (using latest versions)
2121
RUN pip install --no-cache-dir \
2222
praisonai_tools \
23-
"praisonai>=2.2.47" \
23+
"praisonai>=2.2.49" \
2424
"praisonai[ui]" \
2525
"praisonai[chat]" \
2626
"praisonai[realtime]" \

docker/Dockerfile.ui

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ RUN mkdir -p /root/.praison
1616
# Install Python packages (using latest versions)
1717
RUN pip install --no-cache-dir \
1818
praisonai_tools \
19-
"praisonai>=2.2.47" \
19+
"praisonai>=2.2.49" \
2020
"praisonai[ui]" \
2121
"praisonai[crewai]"
2222

docker/README.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -121,7 +121,7 @@ healthcheck:
121121
## 📦 Package Versions
122122
123123
All Docker images use consistent, up-to-date versions:
124-
- PraisonAI: `>=2.2.47`
124+
- PraisonAI: `>=2.2.49`
125125
- PraisonAI Agents: `>=0.0.92`
126126
- Python: `3.11-slim`
127127

@@ -218,7 +218,7 @@ docker-compose up -d
218218
### Version Pinning
219219
To use specific versions, update the Dockerfile:
220220
```dockerfile
221-
RUN pip install "praisonai==2.2.47" "praisonaiagents==0.0.92"
221+
RUN pip install "praisonai==2.2.49" "praisonaiagents==0.0.92"
222222
```
223223

224224
## 🌐 Production Deployment
Lines changed: 197 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,197 @@
"""
Hosted MCP Server implementation for PraisonAI Agents.

This module provides a base class for creating hosted MCP servers
that can handle requests and integrate with the MCP protocol.

Note: This is an example implementation. To use it, ensure you have installed:
    pip install praisonaiagents[mcp] starlette>=0.27.0
"""

import asyncio
import logging
from typing import Dict, Any, Optional, List, Callable
import json

try:
    from mcp.server.fastmcp import FastMCP
    from mcp.server import Server
    from starlette.applications import Starlette
    from starlette.requests import Request
    from starlette.routing import Mount, Route
    from mcp.server.sse import SseServerTransport
    import uvicorn
except ImportError as e:
    # Chain the original error so the user can see exactly which
    # dependency failed to import, not just the generic advice below.
    raise ImportError(
        "MCP server dependencies not installed. "
        "Please install with: pip install praisonaiagents[mcp] starlette>=0.27.0"
    ) from e

logger = logging.getLogger(__name__)
class HostedMCPServer:
    """
    Base class for creating hosted MCP servers.

    This class provides a foundation for building MCP servers that can:
    - Handle incoming requests
    - Define custom tools
    - Support SSE transport
    - Be extended with custom functionality like latency tracking
    """

    def __init__(self, name: str = "hosted-mcp-server", host: str = "localhost", port: int = 8080):
        """
        Initialize the hosted MCP server.

        Args:
            name: Server name for identification
            host: Host to bind to (default: localhost)
            port: Port to listen on (default: 8080)
        """
        self.name = name
        self.host = host
        self.port = port
        self.mcp = FastMCP(name)
        self._tools: Dict[str, Callable] = {}
        self._server: Optional[Server] = None
        self._app: Optional[Starlette] = None

    def handle_request(self, request_data: Dict[str, Any]) -> Dict[str, Any]:
        """
        Handle incoming MCP requests.

        This method can be overridden in subclasses to add custom request
        handling, such as latency tracking, authentication, or request
        modification.

        Args:
            request_data: The incoming request data (JSON-RPC shaped dict;
                ``method`` and ``id`` keys are read if present)

        Returns:
            A minimal JSON-RPC 2.0 response dict with an empty ``result``.
        """
        # Default implementation only echoes the request id back; subclasses
        # are expected to dispatch on `method` themselves.
        method = request_data.get('method', '')
        request_id = request_data.get('id', 'unknown')

        logger.debug(f"Handling request {request_id}: {method}")

        return {
            'id': request_id,
            'jsonrpc': '2.0',
            'result': {}
        }

    def add_tool(self, func: Callable, name: Optional[str] = None, description: Optional[str] = None):
        """
        Add a tool to the MCP server.

        Args:
            func: The function to expose as a tool (sync or async)
            name: Optional name for the tool (defaults to function name)
            description: Optional description for the tool (applied to the
                wrapper's docstring for sync functions)
        """
        import functools  # local import: keeps the module import block untouched

        tool_name = name or func.__name__

        if asyncio.iscoroutinefunction(func):
            # Already async - register directly.
            # NOTE(review): `description` is not applied in this branch
            # (matching previous behavior); FastMCP falls back to func.__doc__.
            self.mcp.tool(name=tool_name)(func)
        else:
            # Wrap sync function in async. functools.wraps copies __name__,
            # __doc__ and sets __wrapped__, so inspect.signature() resolves to
            # the original function's signature -- FastMCP introspects the
            # signature to build the tool's parameter schema, which a bare
            # (*args, **kwargs) wrapper would otherwise hide.
            @functools.wraps(func)
            async def async_wrapper(*args, **kwargs):
                return func(*args, **kwargs)

            if description:
                async_wrapper.__doc__ = description
            self.mcp.tool(name=tool_name)(async_wrapper)

        self._tools[tool_name] = func
        logger.info(f"Added tool: {tool_name}")

    def create_app(self, debug: bool = False) -> Starlette:
        """
        Create a Starlette application for serving the MCP server.

        Args:
            debug: Enable debug mode

        Returns:
            Starlette application instance

        Raises:
            RuntimeError: If the MCP server is not properly initialized
        """
        if not self._server:
            # FastMCP keeps the low-level Server on a private attribute;
            # guard against API changes before reaching into it.
            if not hasattr(self.mcp, '_mcp_server'):
                raise RuntimeError("MCP server not properly initialized. Ensure FastMCP is correctly set up.")
            self._server = self.mcp._mcp_server

        sse = SseServerTransport("/messages/")

        async def handle_sse(request: Request) -> None:
            # Bridge the Starlette request into the MCP server's SSE transport.
            logger.debug(f"SSE connection from {request.client}")
            async with sse.connect_sse(
                request.scope,
                request.receive,
                request._send,  # NOTE(review): private Starlette API, required by SseServerTransport
            ) as (read_stream, write_stream):
                await self._server.run(
                    read_stream,
                    write_stream,
                    self._server.create_initialization_options(),
                )

        self._app = Starlette(
            debug=debug,
            routes=[
                Route("/sse", endpoint=handle_sse),
                Mount("/messages/", app=sse.handle_post_message),
            ],
        )

        return self._app

    def start(self, debug: bool = False, **uvicorn_kwargs):
        """
        Start the MCP server (blocking).

        Args:
            debug: Enable debug mode
            **uvicorn_kwargs: Additional arguments to pass to uvicorn
        """
        app = self.create_app(debug=debug)

        print(f"Starting {self.name} MCP server on {self.host}:{self.port}")
        print(f"Available tools: {', '.join(self._tools.keys())}")
        print(f"SSE endpoint: http://{self.host}:{self.port}/sse")

        uvicorn.run(app, host=self.host, port=self.port, **uvicorn_kwargs)

    async def start_async(self, debug: bool = False):
        """
        Start the MCP server asynchronously (for use inside a running event loop).

        Args:
            debug: Enable debug mode
        """
        app = self.create_app(debug=debug)

        config = uvicorn.Config(app, host=self.host, port=self.port)
        server = uvicorn.Server(config)

        print(f"Starting {self.name} MCP server on {self.host}:{self.port}")
        print(f"Available tools: {', '.join(self._tools.keys())}")

        await server.serve()

    def get_tools(self) -> List[str]:
        """Get list of available tool names."""
        return list(self._tools.keys())

    def get_endpoint(self) -> str:
        """Get the SSE endpoint URL."""
        return f"http://{self.host}:{self.port}/sse"

examples/python/custom_tools/mcp_server_latency_example.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
"""
77

88
from praisonaiagents import Agent, PraisonAIAgents
9-
from praisonaiagents.mcp import HostedMCPServer
9+
from hosted_server import HostedMCPServer # Import from local file
1010
from latency_tracker_tool import tracker, get_latency_metrics
1111
import json
1212

0 commit comments

Comments
 (0)