diff --git a/.env.template b/.env.template index 2382458..4501565 100644 --- a/.env.template +++ b/.env.template @@ -45,14 +45,19 @@ COSMOSDB_CONTAINER_NAME="kabuto" COSMOSDB_PARTITION_KEY="/id" # SQL Database Settings -SQL_DATABASE_URI="sqlite:///template_langgraph.db" -# SQL_DATABASE_URI="postgresql://user:password@localhost:5432/db" +SQL_DATABASE_URI="" +# SQL_DATABASE_URI="sqlite:///template_langgraph.db" # SQLite +# SQL_DATABASE_URI="postgresql://user:password@localhost:5432/db" # PostgreSQL # Azure AI Search Settings AI_SEARCH_ENDPOINT="https://xxx.search.windows.net/" AI_SEARCH_KEY="xxx" AI_SEARCH_INDEX_NAME="kabuto" +# MCP Settings +MCP_CONFIG_PATH="" +# MCP_CONFIG_PATH="./.vscode/mcp.json" # VS Code + # --------- # Internals # --------- diff --git a/.vscode/mcp.json b/.vscode/mcp.json new file mode 100644 index 0000000..b31fa9f --- /dev/null +++ b/.vscode/mcp.json @@ -0,0 +1,16 @@ +{ + "servers": { + "playwright": { + "command": "npx", + "args": ["-y", "@playwright/mcp@latest", "--vision"] + }, + "filesystem": { + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-filesystem", "./assets"] + }, + "everything": { + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-everything"] + } + } +} diff --git a/Makefile b/Makefile index 9e639a5..117e6cc 100644 --- a/Makefile +++ b/Makefile @@ -133,6 +133,10 @@ streamlit: ## run Streamlit uv run streamlit run \ template_langgraph/services/streamlits/main.py +.PHONY: mcp-inspector +mcp-inspector: ## run MCP Inspector server + npx -y @modelcontextprotocol/inspector + # --- # Project / Create indices # --- diff --git a/docs/references.md b/docs/references.md index 706c760..b7abf74 100644 --- a/docs/references.md +++ b/docs/references.md @@ -7,6 +7,8 @@ - [🤖 LangGraph Multi-Agent Supervisor](https://github.com/langchain-ai/langgraph-supervisor-py) - [Software Design 誌「実践 LLM アプリケーション開発」第 24 回サンプルコード](https://github.com/mahm/softwaredesign-llm-application/tree/main/24) - 
[Streamlit](https://python.langchain.com/docs/integrations/callbacks/streamlit/) +- [LangChain MCP Adapters](https://github.com/langchain-ai/langchain-mcp-adapters) + - [Research Agent with MCP Integration.](https://github.com/langchain-ai/deep_research_from_scratch/blob/main/src/deep_research_from_scratch/research_agent_mcp.py) ### Sample Codes diff --git a/pyproject.toml b/pyproject.toml index 01ec327..d4d1a2d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,6 +13,7 @@ dependencies = [ "httpx>=0.28.1", "langchain-azure-ai>=0.1.4", "langchain-community>=0.3.27", + "langchain-mcp-adapters>=0.1.9", "langchain-ollama>=0.3.6", "langchain-openai>=0.3.28", "langchain-text-splitters>=0.3.9", diff --git a/template_langgraph/agents/chat_with_tools_agent/agent.py b/template_langgraph/agents/chat_with_tools_agent/agent.py index 8ab5b69..7d88c14 100644 --- a/template_langgraph/agents/chat_with_tools_agent/agent.py +++ b/template_langgraph/agents/chat_with_tools_agent/agent.py @@ -1,3 +1,4 @@ +import asyncio import json from langchain_core.messages import ToolMessage @@ -6,7 +7,7 @@ from template_langgraph.agents.chat_with_tools_agent.models import AgentState from template_langgraph.llms.azure_openais import AzureOpenAiWrapper from template_langgraph.loggers import get_logger -from template_langgraph.tools.common import get_default_tools +from template_langgraph.tools.common import get_default_tools, is_async_call_required logger = get_logger(__name__) @@ -25,10 +26,13 @@ def __call__(self, inputs: dict): outputs = [] for tool_call in message.tool_calls: try: - tool_result = self.tools_by_name[tool_call["name"]].invoke(tool_call["args"]) + if is_async_call_required(tool_call["name"]): + observation = asyncio.run(self.tools_by_name[tool_call["name"]].ainvoke(tool_call["args"])) + else: + observation = self.tools_by_name[tool_call["name"]].invoke(tool_call["args"]) outputs.append( ToolMessage( - content=json.dumps(tool_result.__str__(), ensure_ascii=False), + 
content=json.dumps(observation.__str__(), ensure_ascii=False), name=tool_call["name"], tool_call_id=tool_call["id"], ) diff --git a/template_langgraph/tools/common.py b/template_langgraph/tools/common.py index eed5c03..b43a8bd 100644 --- a/template_langgraph/tools/common.py +++ b/template_langgraph/tools/common.py @@ -4,6 +4,7 @@ from template_langgraph.tools.cosmosdb_tool import search_cosmosdb from template_langgraph.tools.dify_tool import run_dify_workflow from template_langgraph.tools.elasticsearch_tool import search_elasticsearch +from template_langgraph.tools.mcp_tool import McpClientWrapper from template_langgraph.tools.qdrant_tool import search_qdrant from template_langgraph.tools.sql_database_tool import SqlDatabaseClientWrapper @@ -11,17 +12,27 @@ def get_default_tools(): - try: - sql_database_tools = SqlDatabaseClientWrapper().get_tools( + return ( + [ + search_ai_search, + search_cosmosdb, + run_dify_workflow, + search_qdrant, + search_elasticsearch, + ] + + SqlDatabaseClientWrapper().get_tools( llm=AzureOpenAiWrapper().chat_model, ) - except Exception as e: - logger.error(f"Error occurred while getting SQL database tools: {e}") - sql_database_tools = [] - return [ - search_ai_search, - search_cosmosdb, - run_dify_workflow, - search_qdrant, - search_elasticsearch, - ] + sql_database_tools + + McpClientWrapper().get_tools() + ) + + +def is_async_call_required(tool_name: str) -> bool: + # FIXME: adhoc impl + if tool_name.startswith("browser_"): + return True + return tool_name in [ + "echo", + "add", + # add async tool names here + ] diff --git a/template_langgraph/tools/mcp_tool.py b/template_langgraph/tools/mcp_tool.py new file mode 100644 index 0000000..a0a1760 --- /dev/null +++ b/template_langgraph/tools/mcp_tool.py @@ -0,0 +1,44 @@ +import asyncio +import json +from functools import lru_cache + +from langchain_core.tools.base import BaseTool +from langchain_mcp_adapters.client import MultiServerMCPClient +from pydantic_settings import BaseSettings, 
SettingsConfigDict + + +class Settings(BaseSettings): + mcp_config_path: str = "" + + model_config = SettingsConfigDict( + env_file=".env", + env_ignore_empty=True, + extra="ignore", + ) + + +@lru_cache +def get_mcp_settings() -> Settings: + """Get mcp settings.""" + return Settings() + + +class McpClientWrapper: + def __init__( + self, + settings: Settings = None, + ): + if settings is None: + settings = get_mcp_settings() + self.settings = settings + + def get_tools(self) -> list[BaseTool]: + if self.settings.mcp_config_path == "": + return [] + with open(self.settings.mcp_config_path) as f: + config = json.load(f) + for _, value in config["servers"].items(): + value["transport"] = "stdio" + client = MultiServerMCPClient(config["servers"]) + tools = asyncio.run(client.get_tools()) + return tools diff --git a/template_langgraph/tools/sql_database_tool.py b/template_langgraph/tools/sql_database_tool.py index 2ae10d6..0635a84 100644 --- a/template_langgraph/tools/sql_database_tool.py +++ b/template_langgraph/tools/sql_database_tool.py @@ -8,7 +8,7 @@ class Settings(BaseSettings): - sql_database_uri: str = "sqlite:///template_langgraph.db" + sql_database_uri: str = "" model_config = SettingsConfigDict( env_file=".env", @@ -30,15 +30,19 @@ def __init__( ): if settings is None: settings = get_sql_database_settings() - self.db = SQLDatabase.from_uri( - database_uri=settings.sql_database_uri, - ) + self.settings = settings def get_tools( self, llm: BaseLanguageModel, ) -> list[BaseTool]: """Get SQL Database tools.""" + if self.settings.sql_database_uri == "": + return [] + + self.db = SQLDatabase.from_uri( + database_uri=self.settings.sql_database_uri, + ) return SQLDatabaseToolkit( db=self.db, llm=llm, diff --git a/uv.lock b/uv.lock index 088a8fa..78ead63 100644 --- a/uv.lock +++ b/uv.lock @@ -1981,6 +1981,20 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/4d/26/545283681ac0379d31c7ad0bac5f195e1982092d76c65ca048db9e3cec0e/langchain_core-0.3.74-py3-none-any.whl", hash = "sha256:088338b5bc2f6a66892f9afc777992c24ee3188f41cbc603d09181e34a228ce7", size = 443453, upload-time = "2025-08-07T20:47:03.853Z" }, ] +[[package]] +name = "langchain-mcp-adapters" +version = "0.1.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langchain-core" }, + { name = "mcp" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b9/74/e36003a43136f9095a5f968c730fbfe894f94284ebe6d2b50bb17d41b8b5/langchain_mcp_adapters-0.1.9.tar.gz", hash = "sha256:0018cf7b5f7bc4c044e05ec20fcb9ebe345311c8d1060c61d411188001ab3aab", size = 22101, upload-time = "2025-07-09T15:56:14.455Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/eb/9e98822d3db22beff44449a8f61fca208d4f59d592a7ce67ce4c400b8f8f/langchain_mcp_adapters-0.1.9-py3-none-any.whl", hash = "sha256:fd131009c60c9e5a864f96576bbe757fc1809abd604891cb2e5d6e8aebd6975c", size = 15300, upload-time = "2025-07-09T15:56:13.316Z" }, +] + [[package]] name = "langchain-ollama" version = "0.3.6" @@ -2286,6 +2300,28 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/8f/8e/9ad090d3553c280a8060fbf6e24dc1c0c29704ee7d1c372f0c174aa59285/matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca", size = 9899, upload-time = "2024-04-15T13:44:43.265Z" }, ] +[[package]] +name = "mcp" +version = "1.13.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "httpx" }, + { name = "httpx-sse" }, + { name = "jsonschema" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "python-multipart" }, + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "sse-starlette" }, + { name = "starlette" }, + { name = "uvicorn", marker = "sys_platform != 
'emscripten'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d3/a8/564c094de5d6199f727f5d9f5672dbec3b00dfafd0f67bf52d995eaa5951/mcp-1.13.0.tar.gz", hash = "sha256:70452f56f74662a94eb72ac5feb93997b35995e389b3a3a574e078bed2aa9ab3", size = 434709, upload-time = "2025-08-14T15:03:58.58Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/6b/46b8bcefc2ee9e2d2e8d2bd25f1c2512f5a879fac4619d716b194d6e7ccc/mcp-1.13.0-py3-none-any.whl", hash = "sha256:8b1a002ebe6e17e894ec74d1943cc09aa9d23cb931bf58d49ab2e9fa6bb17e4b", size = 160226, upload-time = "2025-08-14T15:03:56.641Z" }, +] + [[package]] name = "mdurl" version = "0.1.2" @@ -4575,9 +4611,9 @@ name = "sse-starlette" version = "2.1.3" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "anyio", marker = "python_full_version >= '3.11'" }, - { name = "starlette", marker = "python_full_version >= '3.11'" }, - { name = "uvicorn", marker = "python_full_version >= '3.11'" }, + { name = "anyio" }, + { name = "starlette" }, + { name = "uvicorn" }, ] sdist = { url = "https://files.pythonhosted.org/packages/72/fc/56ab9f116b2133521f532fce8d03194cf04dcac25f583cf3d839be4c0496/sse_starlette-2.1.3.tar.gz", hash = "sha256:9cd27eb35319e1414e3d2558ee7414487f9529ce3b3cf9b21434fd110e017169", size = 19678, upload-time = "2024-08-01T08:52:50.248Z" } wheels = [ @@ -4663,6 +4699,7 @@ dependencies = [ { name = "httpx" }, { name = "langchain-azure-ai" }, { name = "langchain-community" }, + { name = "langchain-mcp-adapters" }, { name = "langchain-ollama" }, { name = "langchain-openai" }, { name = "langchain-text-splitters" }, @@ -4708,6 +4745,7 @@ requires-dist = [ { name = "httpx", specifier = ">=0.28.1" }, { name = "langchain-azure-ai", specifier = ">=0.1.4" }, { name = "langchain-community", specifier = ">=0.3.27" }, + { name = "langchain-mcp-adapters", specifier = ">=0.1.9" }, { name = "langchain-ollama", specifier = ">=0.3.6" }, { name = "langchain-openai", specifier = ">=0.3.28" }, { 
name = "langchain-text-splitters", specifier = ">=0.3.9" },