62 changes: 44 additions & 18 deletions examples/basic/mcp_server_aggregator/main.py
@@ -8,8 +8,16 @@

app = MCPApp(name="mcp_server_aggregator")


async def example_usage_persistent():
@app.tool
async def example_usage_persistent() -> str:
'''
This example tool uses an MCP aggregator to connect to both the
fetch and filesystem servers and aggregate them together, so the
tools from both servers can be listed and called at once. The
connections to the servers are persistent.
'''
result = ""
context = app.context

logger = get_logger("mcp_server_aggregator.example_usage_persistent")
@@ -26,31 +34,43 @@ async def example_usage_persistent():
)
# Call list_tools on the aggregator, which will search all servers for the tool
logger.info("Aggregator: Calling list_tools...")
result = await aggregator.list_tools()
logger.info("Tools available:", data=result)
output = await aggregator.list_tools()
logger.info("Tools available:", data=output)
result+="Tools available:"+str(output)

# Call read_file on the aggregator, which will search all servers for the tool
result = await aggregator.call_tool(
name="read_file",
output = await aggregator.call_tool(
name="read_text_file",
arguments={"path": str(Path.cwd() / "README.md")},
)
logger.info("read_file result:", data=result)
logger.info("read_text_file result:", data=output)
result+="\n\nread_text_file result:" + str(output)

# Call fetch.fetch on the aggregator
# (i.e. server-namespacing -- fetch is the servername, which exposes fetch tool)
result = await aggregator.call_tool(
output = await aggregator.call_tool(
name="fetch_fetch",
arguments={"url": "https://jsonplaceholder.typicode.com/todos/1"},
)
logger.info("fetch result:", data=result)
logger.info("fetch result:", data=output)
result+=f"\n\nfetch result: {str(output)}"
except Exception as e:
logger.error("Error in example_usage_persistent:", data=e)
finally:
logger.info("Closing all server connections on aggregator...")
await aggregator.close()


async def example_usage():
return result
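The fetch_fetch call above relies on server-namespacing: the aggregator prefixes a tool name with the name of the server that exposes it, so "fetch_fetch" means the fetch tool on the fetch server. Below is a minimal sketch of that convention, assuming an underscore separator as in the example; the helper names are hypothetical illustrations, not mcp-agent's internal API.

# Hypothetical helpers illustrating the server-namespacing convention used
# above ("fetch_fetch" = server "fetch", tool "fetch"). The "_" separator is
# an assumption taken from the example; mcp-agent's real resolution logic
# may differ.
SEP = "_"

def namespaced(server_name: str, tool_name: str) -> str:
    # ("fetch", "fetch") -> "fetch_fetch"
    return f"{server_name}{SEP}{tool_name}"

def split_namespaced(name: str, known_servers: list[str]) -> tuple[str | None, str]:
    # Resolve "fetch_fetch" -> ("fetch", "fetch"); fall back to an
    # un-namespaced tool name when no known server prefix matches.
    for server in known_servers:
        if name.startswith(server + SEP):
            return server, name[len(server) + len(SEP):]
    return None, name

assert namespaced("fetch", "fetch") == "fetch_fetch"
assert split_namespaced("read_text_file", ["fetch", "filesystem"]) == (None, "read_text_file")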

@app.tool
async def example_usage() -> str:
'''
This example tool uses an MCP aggregator to connect to both the
fetch and filesystem servers and aggregate them together, so the
tools from both servers can be listed and called at once.
'''
result = ""
logger = get_logger("mcp_server_aggregator.example_usage")

context = app.context
@@ -67,29 +87,35 @@ async def example_usage():
)
# Call list_tools on the aggregator, which will search all servers for the tool
logger.info("Aggregator: Calling list_tools...")
result = await aggregator.list_tools()
logger.info("Tools available:", data=result)
output = await aggregator.list_tools()
logger.info("Tools available:", data=output)
result+="Tools available:"+str(output)

# Call read_file on the aggregator, which will search all servers for the tool
result = await aggregator.call_tool(
name="read_file",
output = await aggregator.call_tool(
name="read_text_file",
arguments={"path": str(Path.cwd() / "README.md")},
)
logger.info("read_file result:", data=result)
logger.info("read_text_file result:", data=output)
result+="\n\nread_text_file result:" + str(output)

# Call fetch.fetch on the aggregator
# (i.e. server-namespacing -- fetch is the servername, which exposes fetch tool)
result = await aggregator.call_tool(
output = await aggregator.call_tool(
name="fetch_fetch",
arguments={"url": "https://jsonplaceholder.typicode.com/todos/1"},
)
logger.info(f"fetch result: {str(result)}")
logger.info(f"fetch result: {str(output)}")
result+=f"\n\nfetch result: {str(output)}"
except Exception as e:
logger.error("Error in example_usage:", data=e)
finally:
logger.info("Closing all server connections on aggregator...")
await aggregator.close()

print(result)

return result

if __name__ == "__main__":
import time
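For reference, here is a minimal standalone sketch of the aggregator flow these two tools exercise. Only list_tools, call_tool, and close appear in the visible hunks; the MCPAggregator.create(...) construction, its connection_persistence flag, and the import path are assumptions (the construction lines sit in the collapsed part of the diff), so treat this as a sketch rather than the example's exact code.

# Standalone sketch, assuming MCPAggregator.create(server_names=...,
# connection_persistence=...) and the import path below; verify against the
# actual example before relying on them.
import asyncio
from pathlib import Path

from mcp_agent.mcp.mcp_aggregator import MCPAggregator  # import path assumed


async def demo() -> str:
    result = ""
    aggregator = await MCPAggregator.create(
        server_names=["fetch", "filesystem"],  # servers used by the example
        connection_persistence=True,           # keep connections open across calls
    )
    try:
        tools = await aggregator.list_tools()  # aggregated across all servers
        result += "Tools available:" + str(tools)

        read = await aggregator.call_tool(
            name="read_text_file",  # resolved to the filesystem server
            arguments={"path": str(Path.cwd() / "README.md")},
        )
        result += "\n\nread_text_file result:" + str(read)

        fetched = await aggregator.call_tool(
            name="fetch_fetch",  # server-namespaced: "fetch" server, "fetch" tool
            arguments={"url": "https://jsonplaceholder.typicode.com/todos/1"},
        )
        result += f"\n\nfetch result: {fetched}"
    finally:
        await aggregator.close()  # mirror the example's cleanup
    return result


if __name__ == "__main__":
    print(asyncio.run(demo()))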