diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index a8236fcc91..784ee187b9 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -29,10 +29,6 @@ jobs:
- name: Install dependencies
run: uv sync --all-extras --all-packages --group lint
- - uses: denoland/setup-deno@v2
- with:
- deno-version: v2.x
-
- uses: pre-commit/action@v3.0.0
with:
extra_args: --all-files --verbose
@@ -162,6 +158,7 @@ jobs:
- run: mkdir .coverage
+ - run: uv run mcp-run-python example --deps=numpy
- run: uv sync --only-dev
- run: uv run ${{ matrix.install.command }} coverage run -m pytest --durations=100 -n auto --dist=loadgroup
env:
@@ -200,6 +197,7 @@ jobs:
- run: mkdir .coverage
- run: uv sync --group dev
+ - run: uv run mcp-run-python example --deps=numpy
- run: unset UV_FROZEN
@@ -232,10 +230,6 @@ jobs:
with:
enable-cache: true
- - uses: denoland/setup-deno@v2
- with:
- deno-version: v2.x
-
- run: uv run --all-extras python tests/import_examples.py
coverage:
@@ -272,29 +266,6 @@ jobs:
path: htmlcov
include-hidden-files: true
- test-mcp-run-python:
- runs-on: ubuntu-latest
- timeout-minutes: 5
- env:
- UV_PYTHON: "3.12"
- steps:
- - uses: actions/checkout@v4
-
- - uses: astral-sh/setup-uv@v5
- with:
- enable-cache: true
-
- - uses: denoland/setup-deno@v2
- with:
- deno-version: v2.x
-
- - run: make lint-js
-
- - run: uv run --package mcp-run-python pytest mcp-run-python -v --durations=100
-
- - run: deno task dev warmup
- working-directory: mcp-run-python
-
# https://github.com/marketplace/actions/alls-green#why used for branch protection checks
check:
if: always()
@@ -307,7 +278,6 @@ jobs:
- test-lowest-versions
- test-examples
- coverage
- - test-mcp-run-python
runs-on: ubuntu-latest
steps:
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 1891749af2..8df75892c4 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -44,14 +44,6 @@ repos:
types: [python]
language: system
pass_filenames: false
- - id: lint-js
- name: Lint JS
- entry: make
- args: [lint-js]
- language: system
- types_or: [javascript, ts, json]
- files: "^mcp-run-python/"
- pass_filenames: false
- id: clai-help
name: clai help output
entry: uv
diff --git a/CLAUDE.md b/CLAUDE.md
index 557ab143e0..eca5b16d3c 100644
--- a/CLAUDE.md
+++ b/CLAUDE.md
@@ -83,7 +83,6 @@ This is a uv workspace with multiple packages:
- **`pydantic_graph/`**: Graph execution engine
- **`examples/`**: Example applications
- **`clai/`**: CLI tool
-- **`mcp-run-python/`**: MCP server implementation (Deno/TypeScript)
## Testing Strategy
diff --git a/Makefile b/Makefile
index a86fc210a0..c3e7a8484d 100644
--- a/Makefile
+++ b/Makefile
@@ -8,12 +8,8 @@
.pre-commit: ## Check that pre-commit is installed
@pre-commit -V || echo 'Please install pre-commit: https://pre-commit.com/'
-.PHONY: .deno
-.deno: ## Check that deno is installed
- @deno --version > /dev/null 2>&1 || (printf "\033[0;31m✖ Error: deno is not installed, but is needed for mcp-run-python\033[0m\n Please install deno: https://docs.deno.com/runtime/getting_started/installation/\n" && exit 1)
-
.PHONY: install
-install: .uv .pre-commit .deno ## Install the package, dependencies, and pre-commit for local development
+install: .uv .pre-commit ## Install the package, dependencies, and pre-commit for local development
uv sync --frozen --all-extras --all-packages --group lint --group docs
pre-commit install --install-hooks
@@ -38,10 +34,6 @@ lint: ## Lint the code
uv run ruff format --check
uv run ruff check
-.PHONY: lint-js
-lint-js: ## Lint JS and TS code
- cd mcp-run-python && deno task lint-format
-
.PHONY: typecheck-pyright
typecheck-pyright:
@# PYRIGHT_PYTHON_IGNORE_WARNINGS avoids the overhead of making a request to github on every invocation
@@ -77,11 +69,6 @@ testcov: test ## Run tests and generate an HTML coverage report
@echo "building coverage html"
@uv run coverage html
-.PHONY: test-mrp
-test-mrp: ## Build and tests of mcp-run-python
- cd mcp-run-python && deno task build
- uv run --package mcp-run-python pytest mcp-run-python -v
-
.PHONY: update-examples
update-examples: ## Update documentation examples
uv run -m pytest --update-examples tests/test_examples.py
diff --git a/docs/mcp/client.md b/docs/mcp/client.md
index b429902334..3d27690f24 100644
--- a/docs/mcp/client.md
+++ b/docs/mcp/client.md
@@ -19,7 +19,7 @@ Pydantic AI comes with two ways to connect to MCP servers:
- [`MCPServerSSE`][pydantic_ai.mcp.MCPServerSSE] which connects to an MCP server using the [HTTP SSE](https://spec.modelcontextprotocol.io/specification/2024-11-05/basic/transports/#http-with-sse) transport
- [`MCPServerStdio`][pydantic_ai.mcp.MCPServerStdio] which runs the server as a subprocess and connects to it using the [stdio](https://spec.modelcontextprotocol.io/specification/2024-11-05/basic/transports/#stdio) transport
-Examples of all three are shown below; [mcp-run-python](run-python.md) is used as the MCP server in all examples.
+Examples of all three are shown below.
Each MCP server instance is a [toolset](../toolsets.md) and can be registered with an [`Agent`][pydantic_ai.Agent] using the `toolsets` argument.
@@ -59,9 +59,9 @@ agent = Agent('openai:gpt-4o', toolsets=[server]) # (2)!
async def main():
async with agent: # (3)!
- result = await agent.run('How many days between 2000-01-01 and 2025-03-18?')
+ result = await agent.run('What is 7 plus 5?')
print(result.output)
- #> There are 9,208 days between January 1, 2000, and March 18, 2025.
+ #> The answer is 12.
```
1. Define the MCP server with the URL used to connect.
@@ -97,19 +97,26 @@ Will display as follows:
[`MCPServerSSE`][pydantic_ai.mcp.MCPServerSSE] connects over HTTP using the [HTTP + Server Sent Events transport](https://spec.modelcontextprotocol.io/specification/2024-11-05/basic/transports/#http-with-sse) to a server.
!!! note
- [`MCPServerSSE`][pydantic_ai.mcp.MCPServerSSE] requires an MCP server to be running and accepting HTTP connections before running the agent. Running the server is not managed by Pydantic AI.
+    The SSE transport in MCP is deprecated; you should use Streamable HTTP instead.
+
+Before creating the SSE client, we need to run a server that supports the SSE transport.
+
-The name "HTTP" is used since this implementation will be adapted in future to use the new
-[Streamable HTTP](https://github.com/modelcontextprotocol/specification/pull/206) currently in development.
+```python {title="sse_server.py" dunder_name="not_main"}
+from mcp.server.fastmcp import FastMCP
+
+app = FastMCP()
-Before creating the SSE client, we need to run the server (docs [here](run-python.md)):
+@app.tool()
+def add(a: int, b: int) -> int:
+ return a + b
-```bash {title="terminal (run sse server)"}
-deno run \
- -N -R=node_modules -W=node_modules --node-modules-dir=auto \
- jsr:@pydantic/mcp-run-python sse
+if __name__ == '__main__':
+ app.run(transport='sse')
```
+Then we can create the client:
+
```python {title="mcp_sse_client.py"}
from pydantic_ai import Agent
from pydantic_ai.mcp import MCPServerSSE
@@ -120,9 +127,9 @@ agent = Agent('openai:gpt-4o', toolsets=[server]) # (2)!
async def main():
async with agent: # (3)!
- result = await agent.run('How many days between 2000-01-01 and 2025-03-18?')
+ result = await agent.run('What is 7 plus 5?')
print(result.output)
- #> There are 9,208 days between January 1, 2000, and March 18, 2025.
+ #> The answer is 12.
```
1. Define the MCP server with the URL used to connect.
@@ -133,23 +140,16 @@ _(This example is complete, it can be run "as is" — you'll need to add `asynci
### MCP "stdio" Server
-The other transport offered by MCP is the [stdio transport](https://spec.modelcontextprotocol.io/specification/2024-11-05/basic/transports/#stdio) where the server is run as a subprocess and communicates with the client over `stdin` and `stdout`. In this case, you'd use the [`MCPServerStdio`][pydantic_ai.mcp.MCPServerStdio] class.
+MCP also offers the [stdio transport](https://spec.modelcontextprotocol.io/specification/2024-11-05/basic/transports/#stdio), where the server is run as a subprocess and communicates with the client over `stdin` and `stdout`. In this case, you'd use the [`MCPServerStdio`][pydantic_ai.mcp.MCPServerStdio] class.
+
+In this example, [mcp-run-python](https://github.com/pydantic/mcp-run-python) is used as the MCP server.
```python {title="mcp_stdio_client.py"}
from pydantic_ai import Agent
from pydantic_ai.mcp import MCPServerStdio
server = MCPServerStdio( # (1)!
- 'deno',
- args=[
- 'run',
- '-N',
- '-R=node_modules',
- '-W=node_modules',
- '--node-modules-dir=auto',
- 'jsr:@pydantic/mcp-run-python',
- 'stdio',
- ]
+ 'uv', args=['run', 'mcp-run-python', 'stdio'], timeout=10
)
agent = Agent('openai:gpt-4o', toolsets=[server])
@@ -161,7 +161,7 @@ async def main():
#> There are 9,208 days between January 1, 2000, and March 18, 2025.
```
-1. See [MCP Run Python](run-python.md) for more information.
+1. See [MCP Run Python](https://github.com/pydantic/mcp-run-python) for more information.
## Tool call customisation
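A minimal sketch, assuming `mcp-run-python` is available to `uv run` and still exposes the `run_python_code` tool, of calling the relocated server directly with the Python MCP SDK using the new `uv`-based invocation shown above (mirroring the "Direct Usage" example removed from docs/mcp/run-python.md below):

```python
"""Sketch: call the externally packaged mcp-run-python server directly.

Assumptions (not confirmed by this diff): `mcp-run-python` is available to
`uv run`, and the server still exposes a `run_python_code` tool.
"""
import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

# New uv-based invocation, matching the MCPServerStdio example above.
server_params = StdioServerParameters(command='uv', args=['run', 'mcp-run-python', 'stdio'])

code = """
import numpy
a = numpy.array([1, 2, 3])
print(a)
a
"""


async def main():
    async with stdio_client(server_params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            result = await session.call_tool('run_python_code', {'python_code': code})
            # The tool returns its output as text content (stdout plus return value).
            print(result.content[0].text)


asyncio.run(main())
```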
diff --git a/docs/mcp/overview.md b/docs/mcp/overview.md
index ffaa9a7857..5baff413ca 100644
--- a/docs/mcp/overview.md
+++ b/docs/mcp/overview.md
@@ -1,10 +1,9 @@
# Model Context Protocol (MCP)
-Pydantic AI supports [Model Context Protocol (MCP)](https://modelcontextprotocol.io) in three ways:
+Pydantic AI supports [Model Context Protocol (MCP)](https://modelcontextprotocol.io) in two ways:
1. [Agents](../agents.md) act as an MCP Client, connecting to MCP servers to use their tools, [learn more …](client.md)
2. Agents can be used within MCP servers, [learn more …](server.md)
-3. As part of Pydantic AI, we're building a number of MCP servers, [see below](#mcp-servers)
## What is MCP?
@@ -18,12 +17,4 @@ Some examples of what this means:
- Pydantic AI could use a web search service implemented as an MCP server to implement a deep research agent
- Cursor could connect to the [Pydantic Logfire](https://github.com/pydantic/logfire-mcp) MCP server to search logs, traces and metrics to gain context while fixing a bug
-- Pydantic AI, or any other MCP client could connect to our [Run Python](run-python.md) MCP server to run arbitrary Python code in a sandboxed environment
-
-## MCP Servers
-
-To add functionality to Pydantic AI while making it as widely usable as possible, we're implementing some functionality as MCP servers.
-
-So far, we've only implemented one MCP server as part of Pydantic AI:
-
-- [Run Python](run-python.md): A sandboxed Python interpreter that can run arbitrary code, with a focus on security and safety.
+- Pydantic AI, or any other MCP client, could connect to our [Run Python](https://github.com/pydantic/mcp-run-python) MCP server to run arbitrary Python code in a sandboxed environment
diff --git a/docs/mcp/run-python.md b/docs/mcp/run-python.md
deleted file mode 100644
index ea72d769a6..0000000000
--- a/docs/mcp/run-python.md
+++ /dev/null
@@ -1,181 +0,0 @@
-# MCP Run Python
-
-The **MCP Run Python** package is an MCP server that allows agents to execute Python code in a secure, sandboxed environment. It uses [Pyodide](https://pyodide.org/) to run Python code in a JavaScript environment with [Deno](https://deno.com/), isolating execution from the host system.
-
-## Features
-
-- **Secure Execution**: Run Python code in a sandboxed WebAssembly environment
-- **Package Management**: Automatically detects and installs required dependencies
-- **Complete Results**: Captures standard output, standard error, and return values
-- **Asynchronous Support**: Runs async code properly
-- **Error Handling**: Provides detailed error reports for debugging
-
-## Installation
-
-!!! warning "Switch from npx to deno"
- We previously distributed `mcp-run-python` as an `npm` package to use via `npx`.
- We now recommend using `deno` instead as it provides better sandboxing and security.
-
-The MCP Run Python server is distributed as a [JSR package](https://jsr.io/@pydantic/mcp-run-python) and can be run directly using [`deno run`](https://deno.com/):
-
-```bash {title="terminal"}
-deno run \
- -N -R=node_modules -W=node_modules --node-modules-dir=auto \
- jsr:@pydantic/mcp-run-python [stdio|streamable_http|sse|warmup]
-```
-
-where:
-
-- `-N -R=node_modules -W=node_modules` (alias of
- `--allow-net --allow-read=node_modules --allow-write=node_modules`) allows
- network access and read+write access to `./node_modules`. These are required
- so Pyodide can download and cache the Python standard library and packages
-- `--node-modules-dir=auto` tells deno to use a local `node_modules` directory
-- `stdio` runs the server with the
- [Stdio MCP transport](https://modelcontextprotocol.io/specification/2025-06-18/basic/transports#stdio) — suitable for
- running the process as a subprocess locally
-- `streamable_http` runs the server with the
- [Streamable HTTP MCP transport](https://modelcontextprotocol.io/specification/2025-06-18/basic/transports#streamable-http) -
- suitable for running the server as an HTTP server to connect locally or remotely. This supports stateful requests, but does not require the client to hold a stateful connection like SSE
-- `sse` runs the server with the
- [SSE MCP transport](https://modelcontextprotocol.io/specification/2024-11-05/basic/transports#http-with-sse) —
- suitable for running the server as an HTTP server to connect locally or remotely. Note that the SSE transport has been
- [deprecated in newer MCP protocol versions](https://modelcontextprotocol.io/specification/2025-06-18/basic/transports#backwards-compatibility)
- and is there to maintain backwards compatibility.
-- `warmup` will run a minimal Python script to download and cache the Python
- standard library. This is also useful to check the server is running
- correctly.
-
-Usage of `jsr:@pydantic/mcp-run-python` with Pydantic AI is described in the [client](client.md#mcp-stdio-server) documentation.
-
-## Direct Usage
-
-As well as using this server with Pydantic AI, it can be connected to other MCP clients. For clarity, in this example we connect directly using the [Python MCP client](https://github.com/modelcontextprotocol/python-sdk).
-
-```python {title="mcp_run_python.py"}
-from mcp import ClientSession, StdioServerParameters
-from mcp.client.stdio import stdio_client
-
-code = """
-import numpy
-a = numpy.array([1, 2, 3])
-print(a)
-a
-"""
-server_params = StdioServerParameters(
- command='deno',
- args=[
- 'run',
- '-N',
- '-R=node_modules',
- '-W=node_modules',
- '--node-modules-dir=auto',
- 'jsr:@pydantic/mcp-run-python',
- 'stdio',
- ],
-)
-
-
-async def main():
- async with stdio_client(server_params) as (read, write):
- async with ClientSession(read, write) as session:
- await session.initialize()
- tools = await session.list_tools()
- print(len(tools.tools))
- #> 1
- print(repr(tools.tools[0].name))
- #> 'run_python_code'
- print(repr(tools.tools[0].inputSchema))
- """
- {'type': 'object', 'properties': {'python_code': {'type': 'string', 'description': 'Python code to run'}}, 'required': ['python_code'], 'additionalProperties': False, '$schema': 'http://json-schema.org/draft-07/schema#'}
- """
- result = await session.call_tool('run_python_code', {'python_code': code})
- print(result.content[0].text)
- """
- success
- ["numpy"]
-
-
- [
- 1,
- 2,
- 3
- ]
-
- """
-```
-
-If an exception occurs, `status` will be `install-error` or `run-error` and `return_value` will be replaced
-by `error` which will include the traceback and exception message.
-
-## Dependencies
-
-Dependencies are installed when code is run.
-
-Dependencies can be defined in one of two ways:
-
-### Inferred from imports
-
-If there's no metadata, dependencies are inferred from imports in the code,
-as shown in the example [above](#direct-usage).
-
-### Inline script metadata
-
-As introduced in PEP 723, explained [here](https://packaging.python.org/en/latest/specifications/inline-script-metadata/#inline-script-metadata), and popularized by [uv](https://docs.astral.sh/uv/guides/scripts/#declaring-script-dependencies) — dependencies can be defined in a comment at the top of the file.
-
-This allows use of dependencies that aren't imported in the code, and is more explicit.
-
-```py {title="inline_script_metadata.py" requires="mcp_run_python.py"}
-from mcp import ClientSession
-from mcp.client.stdio import stdio_client
-
-# using `server_params` from the above example.
-from mcp_run_python import server_params
-
-code = """\
-# /// script
-# dependencies = ["pydantic", "email-validator"]
-# ///
-import pydantic
-
-class Model(pydantic.BaseModel):
- email: pydantic.EmailStr
-
-print(Model(email='hello@pydantic.dev'))
-"""
-
-
-async def main():
- async with stdio_client(server_params) as (read, write):
- async with ClientSession(read, write) as session:
- await session.initialize()
- result = await session.call_tool('run_python_code', {'python_code': code})
- print(result.content[0].text)
- """
- success
- ["pydantic","email-validator"]
-
- """
-```
-
-It also allows versions to be pinned for non-binary packages (Pyodide only supports a single version for the binary packages it supports, like `pydantic` and `numpy`).
-
-E.g. you could set the dependencies to
-
-```python
-# /// script
-# dependencies = ["rich<13"]
-# ///
-```
-
-## Logging
-
-MCP Run Python supports emitting stdout and stderr from the python execution as [MCP logging messages](https://github.com/modelcontextprotocol/specification/blob/eb4abdf2bb91e0d5afd94510741eadd416982350/docs/specification/draft/server/utilities/logging.md?plain=1).
-
-For logs to be emitted you must set the logging level when connecting to the server. By default, the log level is set to the highest level, `emergency`.
-
-Currently, it's not possible to demonstrate this due to a bug in the Python MCP Client, see [modelcontextprotocol/python-sdk#201](https://github.com/modelcontextprotocol/python-sdk/issues/201#issuecomment-2727663121).
diff --git a/mcp-run-python/.gitignore b/mcp-run-python/.gitignore
deleted file mode 100644
index 76111564d9..0000000000
--- a/mcp-run-python/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-node_modules
-src/prepareEnvCode.ts
diff --git a/mcp-run-python/.zed/settings.json b/mcp-run-python/.zed/settings.json
deleted file mode 100644
index fe7afdd752..0000000000
--- a/mcp-run-python/.zed/settings.json
+++ /dev/null
@@ -1,23 +0,0 @@
-// Folder-specific settings to tell zed to use deno not npm
-{
- "languages": {
- "TypeScript": {
- "language_servers": [
- "deno",
- "!typescript-language-server",
- "!vtsls",
- "!eslint"
- ],
- "formatter": "language_server"
- }
- },
- "lsp": {
- "deno": {
- "settings": {
- "deno": {
- "enable": true
- }
- }
- }
- }
-}
diff --git a/mcp-run-python/LICENSE b/mcp-run-python/LICENSE
deleted file mode 100644
index 1bf1f55e6d..0000000000
--- a/mcp-run-python/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) Pydantic Services Inc. 2024 to present
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/mcp-run-python/README.md b/mcp-run-python/README.md
deleted file mode 100644
index db70ab514e..0000000000
--- a/mcp-run-python/README.md
+++ /dev/null
@@ -1,73 +0,0 @@
-# MCP Run Python
-
-[Model Context Protocol](https://modelcontextprotocol.io/) server to run Python code in a sandbox.
-
-The code is executed using [Pyodide](https://pyodide.org) in [Deno](https://deno.com/) and is therefore isolated from
-the rest of the operating system.
-
-**See <https://ai.pydantic.dev/mcp/run-python/> for complete documentation.**
-
-The server can be run with `deno` installed using:
-
-```bash
-deno run \
- -N -R=node_modules -W=node_modules --node-modules-dir=auto \
- jsr:@pydantic/mcp-run-python [stdio|streamable_http|sse|warmup]
-```
-
-where:
-
-- `-N -R=node_modules -W=node_modules` (alias of `--allow-net --allow-read=node_modules --allow-write=node_modules`)
- allows network access and read+write access to `./node_modules`. These are required so pyodide can download and cache
- the Python standard library and packages
-- `--node-modules-dir=auto` tells deno to use a local `node_modules` directory
-- `stdio` runs the server with the
- [Stdio MCP transport](https://modelcontextprotocol.io/specification/2025-06-18/basic/transports#stdio) — suitable for
- running the process as a subprocess locally
-- `streamable_http` runs the server with the
- [Streamable HTTP MCP transport](https://modelcontextprotocol.io/specification/2025-06-18/basic/transports#streamable-http) -
- suitable for running the server as an HTTP server to connect locally or remotely. This supports stateful requests, but
- does not require the client to hold a stateful connection like SSE
-- `sse` runs the server with the
- [SSE MCP transport](https://modelcontextprotocol.io/specification/2024-11-05/basic/transports#http-with-sse) —
- suitable for running the server as an HTTP server to connect locally or remotely. Note that the SSE transport has been
- [deprecated in newer MCP protocol versions](https://modelcontextprotocol.io/specification/2025-06-18/basic/transports#backwards-compatibility)
- and is there to maintain backwards compatibility.
-- `warmup` will run a minimal Python script to download and cache the Python standard library. This is also useful to
- check the server is running correctly.
-
-Here's an example of using `@pydantic/mcp-run-python` with Pydantic AI:
-
-```python
-from pydantic_ai import Agent
-from pydantic_ai.mcp import MCPServerStdio
-
-import logfire
-
-logfire.configure()
-logfire.instrument_mcp()
-logfire.instrument_pydantic_ai()
-
-server = MCPServerStdio('deno',
- args=[
- 'run',
- '-N',
- '-R=node_modules',
- '-W=node_modules',
- '--node-modules-dir=auto',
- 'jsr:@pydantic/mcp-run-python',
- 'stdio',
- ])
-agent = Agent('claude-3-5-haiku-latest', toolsets=[server])
-
-
-async def main():
- async with agent:
- result = await agent.run('How many days between 2000-01-01 and 2025-03-18?')
- print(result.output)
-    #> There are 9,208 days between January 1, 2000, and March 18, 2025.
-
-if __name__ == '__main__':
- import asyncio
- asyncio.run(main())
-```
diff --git a/mcp-run-python/build.ts b/mcp-run-python/build.ts
deleted file mode 100644
index afac19b096..0000000000
--- a/mcp-run-python/build.ts
+++ /dev/null
@@ -1,16 +0,0 @@
-// inline src/prepare_env.py into src/prepareEnvCode.js
-import * as path from '@std/path'
-
-if (!import.meta.dirname) {
- throw new Error('import.meta.dirname is not defined, unable to load prepare_env.py')
-}
-const src = path.join(import.meta.dirname, 'src/prepare_env.py')
-const dst = path.join(import.meta.dirname, 'src/prepareEnvCode.ts')
-
-let pythonCode = await Deno.readTextFile(src)
-pythonCode = pythonCode.replace(/\\/g, '\\\\')
-const jsCode = `\
-// DO NOT EDIT THIS FILE DIRECTLY, INSTEAD RUN "deno run build"
-export const preparePythonCode = \`${pythonCode}\`
-`
-await Deno.writeTextFile(dst, jsCode)
diff --git a/mcp-run-python/deno.json b/mcp-run-python/deno.json
deleted file mode 100644
index 32892264a5..0000000000
--- a/mcp-run-python/deno.json
+++ /dev/null
@@ -1,37 +0,0 @@
-{
- "name": "@pydantic/mcp-run-python",
- "version": "0.0.16",
- "license": "MIT",
- "nodeModulesDir": "auto",
- "exports": {
- ".": "./src/main.ts"
- },
- "tasks": {
- "build": "deno run -R=. -W=src build.ts",
- "lint-format": "deno task build && deno fmt && deno lint && deno check src && deno publish --dry-run --allow-dirty",
- "dev": "deno task build && deno run -N -R=node_modules -W=node_modules src/main.ts",
- "build-publish": "deno task build && deno publish"
- },
- "imports": {
- "@modelcontextprotocol/sdk": "npm:@modelcontextprotocol/sdk@^1.17.4",
- "@std/cli": "jsr:@std/cli@^1.0.15",
- "@std/path": "jsr:@std/path@^1.0.8",
- // do NOT upgrade above this version until there is a workaround for https://github.com/pyodide/pyodide/pull/5621
- "pyodide": "npm:pyodide@0.27.6",
- "zod": "npm:zod@^3.24.2"
- },
- "fmt": {
- "lineWidth": 120,
- "semiColons": false,
- "singleQuote": true,
- "include": ["."]
- },
- "publish": {
- "include": [
- "src/*.ts",
- "src/prepareEnvCode.ts", // required to override gitignore
- "README.md",
- "deno.json"
- ]
- }
-}
diff --git a/mcp-run-python/deno.lock b/mcp-run-python/deno.lock
deleted file mode 100644
index 1aa7ea5fea..0000000000
--- a/mcp-run-python/deno.lock
+++ /dev/null
@@ -1,543 +0,0 @@
-{
- "version": "5",
- "specifiers": {
- "jsr:@std/cli@*": "1.0.15",
- "jsr:@std/cli@^1.0.15": "1.0.15",
- "jsr:@std/path@*": "1.0.8",
- "jsr:@std/path@^1.0.8": "1.0.8",
- "npm:@modelcontextprotocol/sdk@^1.17.4": "1.17.4_express@5.1.0_zod@3.25.76",
- "npm:@types/node@*": "22.12.0",
- "npm:@types/node@22.12.0": "22.12.0",
- "npm:pyodide@0.27.6": "0.27.6",
- "npm:zod@^3.24.2": "3.25.76"
- },
- "jsr": {
- "@std/cli@1.0.15": {
- "integrity": "e79ba3272ec710ca44d8342a7688e6288b0b88802703f3264184b52893d5e93f"
- },
- "@std/path@1.0.8": {
- "integrity": "548fa456bb6a04d3c1a1e7477986b6cffbce95102d0bb447c67c4ee70e0364be"
- }
- },
- "npm": {
- "@modelcontextprotocol/sdk@1.17.4_express@5.1.0_zod@3.25.76": {
- "integrity": "sha512-zq24hfuAmmlNZvik0FLI58uE5sriN0WWsQzIlYnzSuKDAHFqJtBFrl/LfB1NLgJT5Y7dEBzaX4yAKqOPrcetaw==",
- "dependencies": [
- "ajv",
- "content-type",
- "cors",
- "cross-spawn",
- "eventsource",
- "eventsource-parser",
- "express",
- "express-rate-limit",
- "pkce-challenge",
- "raw-body",
- "zod",
- "zod-to-json-schema"
- ]
- },
- "@types/node@22.12.0": {
- "integrity": "sha512-Fll2FZ1riMjNmlmJOdAyY5pUbkftXslB5DgEzlIuNaiWhXd00FhWxVC/r4yV/4wBb9JfImTu+jiSvXTkJ7F/gA==",
- "dependencies": [
- "undici-types"
- ]
- },
- "accepts@2.0.0": {
- "integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==",
- "dependencies": [
- "mime-types",
- "negotiator"
- ]
- },
- "ajv@6.12.6": {
- "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
- "dependencies": [
- "fast-deep-equal",
- "fast-json-stable-stringify",
- "json-schema-traverse",
- "uri-js"
- ]
- },
- "body-parser@2.2.0": {
- "integrity": "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==",
- "dependencies": [
- "bytes",
- "content-type",
- "debug",
- "http-errors",
- "iconv-lite",
- "on-finished",
- "qs",
- "raw-body",
- "type-is"
- ]
- },
- "bytes@3.1.2": {
- "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg=="
- },
- "call-bind-apply-helpers@1.0.2": {
- "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==",
- "dependencies": [
- "es-errors",
- "function-bind"
- ]
- },
- "call-bound@1.0.4": {
- "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==",
- "dependencies": [
- "call-bind-apply-helpers",
- "get-intrinsic"
- ]
- },
- "content-disposition@1.0.0": {
- "integrity": "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==",
- "dependencies": [
- "safe-buffer"
- ]
- },
- "content-type@1.0.5": {
- "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA=="
- },
- "cookie-signature@1.2.2": {
- "integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg=="
- },
- "cookie@0.7.2": {
- "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w=="
- },
- "cors@2.8.5": {
- "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==",
- "dependencies": [
- "object-assign",
- "vary"
- ]
- },
- "cross-spawn@7.0.6": {
- "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
- "dependencies": [
- "path-key",
- "shebang-command",
- "which"
- ]
- },
- "debug@4.4.1": {
- "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
- "dependencies": [
- "ms"
- ]
- },
- "depd@2.0.0": {
- "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw=="
- },
- "dunder-proto@1.0.1": {
- "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
- "dependencies": [
- "call-bind-apply-helpers",
- "es-errors",
- "gopd"
- ]
- },
- "ee-first@1.1.1": {
- "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="
- },
- "encodeurl@2.0.0": {
- "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg=="
- },
- "es-define-property@1.0.1": {
- "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="
- },
- "es-errors@1.3.0": {
- "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw=="
- },
- "es-object-atoms@1.1.1": {
- "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==",
- "dependencies": [
- "es-errors"
- ]
- },
- "escape-html@1.0.3": {
- "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow=="
- },
- "etag@1.8.1": {
- "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg=="
- },
- "eventsource-parser@3.0.5": {
- "integrity": "sha512-bSRG85ZrMdmWtm7qkF9He9TNRzc/Bm99gEJMaQoHJ9E6Kv9QBbsldh2oMj7iXmYNEAVvNgvv5vPorG6W+XtBhQ=="
- },
- "eventsource@3.0.7": {
- "integrity": "sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA==",
- "dependencies": [
- "eventsource-parser"
- ]
- },
- "express-rate-limit@7.5.1_express@5.1.0": {
- "integrity": "sha512-7iN8iPMDzOMHPUYllBEsQdWVB6fPDMPqwjBaFrgr4Jgr/+okjvzAy+UHlYYL/Vs0OsOrMkwS6PJDkFlJwoxUnw==",
- "dependencies": [
- "express"
- ]
- },
- "express@5.1.0": {
- "integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==",
- "dependencies": [
- "accepts",
- "body-parser",
- "content-disposition",
- "content-type",
- "cookie",
- "cookie-signature",
- "debug",
- "encodeurl",
- "escape-html",
- "etag",
- "finalhandler",
- "fresh",
- "http-errors",
- "merge-descriptors",
- "mime-types",
- "on-finished",
- "once",
- "parseurl",
- "proxy-addr",
- "qs",
- "range-parser",
- "router",
- "send",
- "serve-static",
- "statuses@2.0.2",
- "type-is",
- "vary"
- ]
- },
- "fast-deep-equal@3.1.3": {
- "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="
- },
- "fast-json-stable-stringify@2.1.0": {
- "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="
- },
- "finalhandler@2.1.0": {
- "integrity": "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==",
- "dependencies": [
- "debug",
- "encodeurl",
- "escape-html",
- "on-finished",
- "parseurl",
- "statuses@2.0.2"
- ]
- },
- "forwarded@0.2.0": {
- "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow=="
- },
- "fresh@2.0.0": {
- "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A=="
- },
- "function-bind@1.1.2": {
- "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="
- },
- "get-intrinsic@1.3.0": {
- "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==",
- "dependencies": [
- "call-bind-apply-helpers",
- "es-define-property",
- "es-errors",
- "es-object-atoms",
- "function-bind",
- "get-proto",
- "gopd",
- "has-symbols",
- "hasown",
- "math-intrinsics"
- ]
- },
- "get-proto@1.0.1": {
- "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==",
- "dependencies": [
- "dunder-proto",
- "es-object-atoms"
- ]
- },
- "gopd@1.2.0": {
- "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg=="
- },
- "has-symbols@1.1.0": {
- "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ=="
- },
- "hasown@2.0.2": {
- "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
- "dependencies": [
- "function-bind"
- ]
- },
- "http-errors@2.0.0": {
- "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==",
- "dependencies": [
- "depd",
- "inherits",
- "setprototypeof",
- "statuses@2.0.1",
- "toidentifier"
- ]
- },
- "iconv-lite@0.6.3": {
- "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==",
- "dependencies": [
- "safer-buffer"
- ]
- },
- "inherits@2.0.4": {
- "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
- },
- "ipaddr.js@1.9.1": {
- "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g=="
- },
- "is-promise@4.0.0": {
- "integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ=="
- },
- "isexe@2.0.0": {
- "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="
- },
- "json-schema-traverse@0.4.1": {
- "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="
- },
- "math-intrinsics@1.1.0": {
- "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="
- },
- "media-typer@1.1.0": {
- "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw=="
- },
- "merge-descriptors@2.0.0": {
- "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g=="
- },
- "mime-db@1.54.0": {
- "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ=="
- },
- "mime-types@3.0.1": {
- "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==",
- "dependencies": [
- "mime-db"
- ]
- },
- "ms@2.1.3": {
- "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
- },
- "negotiator@1.0.0": {
- "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg=="
- },
- "object-assign@4.1.1": {
- "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="
- },
- "object-inspect@1.13.4": {
- "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew=="
- },
- "on-finished@2.4.1": {
- "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==",
- "dependencies": [
- "ee-first"
- ]
- },
- "once@1.4.0": {
- "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
- "dependencies": [
- "wrappy"
- ]
- },
- "parseurl@1.3.3": {
- "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ=="
- },
- "path-key@3.1.1": {
- "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="
- },
- "path-to-regexp@8.2.0": {
- "integrity": "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ=="
- },
- "pkce-challenge@5.0.0": {
- "integrity": "sha512-ueGLflrrnvwB3xuo/uGob5pd5FN7l0MsLf0Z87o/UQmRtwjvfylfc9MurIxRAWywCYTgrvpXBcqjV4OfCYGCIQ=="
- },
- "proxy-addr@2.0.7": {
- "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==",
- "dependencies": [
- "forwarded",
- "ipaddr.js"
- ]
- },
- "punycode@2.3.1": {
- "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="
- },
- "pyodide@0.27.6": {
- "integrity": "sha512-ahiSHHs6iFKl2f8aO1wALINAlMNDLAtb44xCI87GQyH2tLDk8F8VWip3u1ZNIyglGSCYAOSFzWKwS1f9gBFVdg==",
- "dependencies": [
- "ws"
- ]
- },
- "qs@6.14.0": {
- "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==",
- "dependencies": [
- "side-channel"
- ]
- },
- "range-parser@1.2.1": {
- "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg=="
- },
- "raw-body@3.0.0": {
- "integrity": "sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g==",
- "dependencies": [
- "bytes",
- "http-errors",
- "iconv-lite",
- "unpipe"
- ]
- },
- "router@2.2.0": {
- "integrity": "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==",
- "dependencies": [
- "debug",
- "depd",
- "is-promise",
- "parseurl",
- "path-to-regexp"
- ]
- },
- "safe-buffer@5.2.1": {
- "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="
- },
- "safer-buffer@2.1.2": {
- "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
- },
- "send@1.2.0": {
- "integrity": "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==",
- "dependencies": [
- "debug",
- "encodeurl",
- "escape-html",
- "etag",
- "fresh",
- "http-errors",
- "mime-types",
- "ms",
- "on-finished",
- "range-parser",
- "statuses@2.0.2"
- ]
- },
- "serve-static@2.2.0": {
- "integrity": "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==",
- "dependencies": [
- "encodeurl",
- "escape-html",
- "parseurl",
- "send"
- ]
- },
- "setprototypeof@1.2.0": {
- "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw=="
- },
- "shebang-command@2.0.0": {
- "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
- "dependencies": [
- "shebang-regex"
- ]
- },
- "shebang-regex@3.0.0": {
- "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="
- },
- "side-channel-list@1.0.0": {
- "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==",
- "dependencies": [
- "es-errors",
- "object-inspect"
- ]
- },
- "side-channel-map@1.0.1": {
- "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==",
- "dependencies": [
- "call-bound",
- "es-errors",
- "get-intrinsic",
- "object-inspect"
- ]
- },
- "side-channel-weakmap@1.0.2": {
- "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==",
- "dependencies": [
- "call-bound",
- "es-errors",
- "get-intrinsic",
- "object-inspect",
- "side-channel-map"
- ]
- },
- "side-channel@1.1.0": {
- "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==",
- "dependencies": [
- "es-errors",
- "object-inspect",
- "side-channel-list",
- "side-channel-map",
- "side-channel-weakmap"
- ]
- },
- "statuses@2.0.1": {
- "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ=="
- },
- "statuses@2.0.2": {
- "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw=="
- },
- "toidentifier@1.0.1": {
- "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA=="
- },
- "type-is@2.0.1": {
- "integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==",
- "dependencies": [
- "content-type",
- "media-typer",
- "mime-types"
- ]
- },
- "undici-types@6.20.0": {
- "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="
- },
- "unpipe@1.0.0": {
- "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ=="
- },
- "uri-js@4.4.1": {
- "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==",
- "dependencies": [
- "punycode"
- ]
- },
- "vary@1.1.2": {
- "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg=="
- },
- "which@2.0.2": {
- "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
- "dependencies": [
- "isexe"
- ],
- "bin": true
- },
- "wrappy@1.0.2": {
- "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="
- },
- "ws@8.18.2": {
- "integrity": "sha512-DMricUmwGZUVr++AEAe2uiVM7UoO9MAVZMDu05UQOaUII0lp+zOzLLU4Xqh/JvTqklB1T4uELaaPBKyjE1r4fQ=="
- },
- "zod-to-json-schema@3.24.6_zod@3.25.76": {
- "integrity": "sha512-h/z3PKvcTcTetyjl1fkj79MHNEjm+HpD6NXheWjzOekY7kV+lwDYnHw+ivHkijnCSMz1yJaWBD9vu/Fcmk+vEg==",
- "dependencies": [
- "zod"
- ]
- },
- "zod@3.25.76": {
- "integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="
- }
- },
- "workspace": {
- "dependencies": [
- "jsr:@std/cli@^1.0.15",
- "jsr:@std/path@^1.0.8",
- "npm:@modelcontextprotocol/sdk@^1.17.4",
- "npm:pyodide@0.27.6",
- "npm:zod@^3.24.2"
- ]
- }
-}
diff --git a/mcp-run-python/pyproject.toml b/mcp-run-python/pyproject.toml
deleted file mode 100644
index 9bcf610d51..0000000000
--- a/mcp-run-python/pyproject.toml
+++ /dev/null
@@ -1,20 +0,0 @@
-[project]
-name = "mcp-run-python"
-version = "0.0.1"
-readme = "README.md"
-classifiers = ["Private :: do not release"]
-
-[dependency-groups]
-dev = [
- "anyio>=4.5.0",
- "dirty-equals>=0.9.0",
- "httpx>=0.28.1",
- "inline-snapshot>=0.19.3",
- "mcp>=1.4.1; python_version >= '3.10'",
- "micropip>=0.9.0; python_version >= '3.12'",
- "pytest>=8.3.3",
- "pytest-pretty>=1.2.0",
-]
-
-[tool.uv.sources]
-mcp-run-python = { workspace = true }
diff --git a/mcp-run-python/src/main.ts b/mcp-run-python/src/main.ts
deleted file mode 100644
index 826be363fe..0000000000
--- a/mcp-run-python/src/main.ts
+++ /dev/null
@@ -1,374 +0,0 @@
-///
-
-import './polyfill.ts'
-import http from 'node:http'
-import { randomUUID } from 'node:crypto'
-import { parseArgs } from '@std/cli/parse-args'
-import { SSEServerTransport } from '@modelcontextprotocol/sdk/server/sse.js'
-import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js'
-import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js'
-import { isInitializeRequest } from '@modelcontextprotocol/sdk/types.js'
-import { type LoggingLevel, SetLevelRequestSchema } from '@modelcontextprotocol/sdk/types.js'
-import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js'
-import { z } from 'zod'
-
-import { asXml, runCode } from './runCode.ts'
-import { Buffer } from 'node:buffer'
-
-const VERSION = '0.0.13'
-
-export async function main() {
- const { args } = Deno
- if (args.length === 1 && args[0] === 'stdio') {
- await runStdio()
- } else if (args.length >= 1 && args[0] === 'streamable_http') {
- const flags = parseArgs(Deno.args, {
- string: ['port'],
- default: { port: '3001' },
- })
- const port = parseInt(flags.port)
- runStreamableHttp(port)
- } else if (args.length >= 1 && args[0] === 'sse') {
- const flags = parseArgs(Deno.args, {
- string: ['port'],
- default: { port: '3001' },
- })
- const port = parseInt(flags.port)
- runSse(port)
- } else if (args.length === 1 && args[0] === 'warmup') {
- await warmup()
- } else {
- console.error(
- `\
-Invalid arguments.
-
-Usage: deno run -N -R=node_modules -W=node_modules --node-modules-dir=auto jsr:@pydantic/mcp-run-python [stdio|streamable_http|sse|warmup]
-
-options:
- --port Port to run the SSE server on (default: 3001)`,
- )
- Deno.exit(1)
- }
-}
-
-/*
- * Create an MCP server with the `run_python_code` tool registered.
- */
-function createServer(): McpServer {
- const server = new McpServer(
- {
- name: 'MCP Run Python',
- version: VERSION,
- },
- {
- instructions: 'Call the "run_python_code" tool with the Python code to run.',
- capabilities: {
- logging: {},
- },
- },
- )
-
- const toolDescription = `Tool to execute Python code and return stdout, stderr, and return value.
-
-The code may be async, and the value on the last line will be returned as the return value.
-
-The code will be executed with Python 3.12.
-
-Dependencies may be defined via PEP 723 script metadata, e.g. to install "pydantic", the script should start
-with a comment of the form:
-
-# /// script
-# dependencies = ['pydantic']
-# ///
-print('python code here')
-`
-
- let setLogLevel: LoggingLevel = 'emergency'
-
- server.server.setRequestHandler(SetLevelRequestSchema, (request) => {
- setLogLevel = request.params.level
- return {}
- })
-
- server.registerTool(
- 'run_python_code',
- {
- title: 'Run Python Code',
- description: toolDescription,
- inputSchema: { python_code: z.string().describe('Python code to run') },
- },
- async ({ python_code }: { python_code: string }) => {
-      const logPromises: Promise<void>[] = []
- const result = await runCode(
- [
- {
- name: 'main.py',
- content: python_code,
- active: true,
- },
- ],
- (level, data) => {
- if (LogLevels.indexOf(level) >= LogLevels.indexOf(setLogLevel)) {
- logPromises.push(server.server.sendLoggingMessage({ level, data }))
- }
- },
- )
- await Promise.all(logPromises)
- return {
- content: [{ type: 'text', text: asXml(result) }],
- }
- },
- )
- return server
-}
-
-/*
- * Define some QOL functions for both the SSE and Streamable HTTP server implementation
- */
-function httpGetUrl(req: http.IncomingMessage): URL {
- return new URL(req.url ?? '', `http://${req.headers.host ?? 'unknown'}`)
-}
-
-function httpGetBody(req: http.IncomingMessage): Promise {
- // https://nodejs.org/en/learn/modules/anatomy-of-an-http-transaction#request-body
- return new Promise((resolve) => {
- // deno-lint-ignore no-explicit-any
- const bodyParts: any[] = []
- let body
- req
- .on('data', (chunk) => {
- bodyParts.push(chunk)
- })
- .on('end', () => {
- body = Buffer.concat(bodyParts).toString()
- resolve(JSON.parse(body))
- })
- })
-}
-
-function httpSetTextResponse(
- res: http.ServerResponse,
- status: number,
- text: string,
-) {
- res.setHeader('Content-Type', 'text/plain')
- res.statusCode = status
- res.end(`${text}\n`)
-}
-
-function httpSetJsonResponse(
- res: http.ServerResponse,
- status: number,
- text: string,
- code: number,
-) {
- res.setHeader('Content-Type', 'application/json')
- res.statusCode = status
- res.write(
- JSON.stringify({
- jsonrpc: '2.0',
- error: {
- code: code,
- message: text,
- },
- id: null,
- }),
- )
- res.end()
-}
-
-/*
- * Run the MCP server using the Streamable HTTP transport
- */
-function runStreamableHttp(port: number) {
- // https://github.com/modelcontextprotocol/typescript-sdk?tab=readme-ov-file#with-session-management
- const mcpServer = createServer()
- const transports: { [sessionId: string]: StreamableHTTPServerTransport } = {}
-
- const server = http.createServer(async (req, res) => {
- const url = httpGetUrl(req)
- let pathMatch = false
- function match(method: string, path: string): boolean {
- if (url.pathname === path) {
- pathMatch = true
- return req.method === method
- }
- return false
- }
-
- // Reusable handler for GET and DELETE requests
- async function handleSessionRequest() {
- const sessionId = req.headers['mcp-session-id'] as string | undefined
- if (!sessionId || !transports[sessionId]) {
- httpSetTextResponse(res, 400, 'Invalid or missing session ID')
- return
- }
-
- const transport = transports[sessionId]
- await transport.handleRequest(req, res)
- }
-
- // Handle different request methods and paths
- if (match('POST', '/mcp')) {
- // Check for existing session ID
- const sessionId = req.headers['mcp-session-id'] as string | undefined
- let transport: StreamableHTTPServerTransport
-
- const body = await httpGetBody(req)
-
- if (sessionId && transports[sessionId]) {
- // Reuse existing transport
- transport = transports[sessionId]
- } else if (!sessionId && isInitializeRequest(body)) {
- // New initialization request
- transport = new StreamableHTTPServerTransport({
- sessionIdGenerator: () => randomUUID(),
- onsessioninitialized: (sessionId) => {
- // Store the transport by session ID
- transports[sessionId] = transport
- },
- })
-
- // Clean up transport when closed
- transport.onclose = () => {
- if (transport.sessionId) {
- delete transports[transport.sessionId]
- }
- }
-
- await mcpServer.connect(transport)
- } else {
- httpSetJsonResponse(
- res,
- 400,
- 'Bad Request: No valid session ID provided',
- -32000,
- )
- return
- }
-
- // Handle the request
- await transport.handleRequest(req, res, body)
- } else if (match('GET', '/mcp')) {
- // Handle server-to-client notifications via SSE
- await handleSessionRequest()
- } else if (match('DELETE', '/mcp')) {
- // Handle requests for session termination
- await handleSessionRequest()
- } else if (pathMatch) {
- httpSetTextResponse(res, 405, 'Method not allowed')
- } else {
- httpSetTextResponse(res, 404, 'Page not found')
- }
- })
-
- server.listen(port, () => {
- console.log(
- `Running MCP Run Python version ${VERSION} with Streamable HTTP transport on port ${port}`,
- )
- })
-}
-
-/*
- * Run the MCP server using the SSE transport, e.g. over HTTP.
- */
-function runSse(port: number) {
- const mcpServer = createServer()
- const transports: { [sessionId: string]: SSEServerTransport } = {}
-
- const server = http.createServer(async (req, res) => {
- const url = httpGetUrl(req)
- let pathMatch = false
- function match(method: string, path: string): boolean {
- if (url.pathname === path) {
- pathMatch = true
- return req.method === method
- }
- return false
- }
-
- if (match('GET', '/sse')) {
- const transport = new SSEServerTransport('/messages', res)
- transports[transport.sessionId] = transport
- res.on('close', () => {
- delete transports[transport.sessionId]
- })
- await mcpServer.connect(transport)
- } else if (match('POST', '/messages')) {
- const sessionId = url.searchParams.get('sessionId') ?? ''
- const transport = transports[sessionId]
- if (transport) {
- await transport.handlePostMessage(req, res)
- } else {
- httpSetTextResponse(
- res,
- 400,
- `No transport found for sessionId '${sessionId}'`,
- )
- }
- } else if (pathMatch) {
- httpSetTextResponse(res, 405, 'Method not allowed')
- } else {
- httpSetTextResponse(res, 404, 'Page not found')
- }
- })
-
- server.listen(port, () => {
- console.log(
- `Running MCP Run Python version ${VERSION} with SSE transport on port ${port}`,
- )
- })
-}
-
-/*
- * Run the MCP server using the Stdio transport.
- */
-async function runStdio() {
- const mcpServer = createServer()
- const transport = new StdioServerTransport()
- await mcpServer.connect(transport)
-}
-
-/*
- * Run pyodide to download packages which can otherwise interrupt the server
- */
-async function warmup() {
- console.error(
- `Running warmup script for MCP Run Python version ${VERSION}...`,
- )
- const code = `
-import numpy
-a = numpy.array([1, 2, 3])
-print('numpy array:', a)
-a
-`
- const result = await runCode(
- [
- {
- name: 'warmup.py',
- content: code,
- active: true,
- },
- ],
- (level, data) =>
- // use warn to avoid recursion since console.log is patched in runCode
- console.error(`${level}: ${data}`),
- )
- console.log('Tool return value:')
- console.log(asXml(result))
- console.log('\nwarmup successful 🎉')
-}
-
-// list of log levels to use for level comparison
-const LogLevels: LoggingLevel[] = [
- 'debug',
- 'info',
- 'notice',
- 'warning',
- 'error',
- 'critical',
- 'alert',
- 'emergency',
-]
-
-await main()
diff --git a/mcp-run-python/src/polyfill.ts b/mcp-run-python/src/polyfill.ts
deleted file mode 100644
index 51ed03fd3c..0000000000
--- a/mcp-run-python/src/polyfill.ts
+++ /dev/null
@@ -1,8 +0,0 @@
-import process from 'node:process'
-
-// Stub `process.env` and always return an empty object
-Object.defineProperty(process, 'env', {
- get() {
- return {}
- },
-})
diff --git a/mcp-run-python/src/prepare_env.py b/mcp-run-python/src/prepare_env.py
deleted file mode 100644
index e22db9ca74..0000000000
--- a/mcp-run-python/src/prepare_env.py
+++ /dev/null
@@ -1,200 +0,0 @@
-"""Logic for installing dependencies in Pyodide.
-
-Mostly taken from https://github.com/pydantic/pydantic.run/blob/main/src/frontend/src/prepare_env.py
-"""
-
-from __future__ import annotations as _annotations
-
-import importlib
-import logging
-import re
-import sys
-import traceback
-from collections.abc import Iterable, Iterator
-from contextlib import contextmanager
-from dataclasses import dataclass
-from pathlib import Path
-from typing import Any, Literal, TypedDict
-
-import micropip
-import pyodide_js
-import tomllib
-from pyodide.code import find_imports
-
-__all__ = 'prepare_env', 'dump_json'
-
-
-class File(TypedDict):
- name: str
- content: str
- active: bool
-
-
-@dataclass
-class Success:
- dependencies: list[str] | None
- kind: Literal['success'] = 'success'
-
-
-@dataclass
-class Error:
- message: str
- kind: Literal['error'] = 'error'
-
-
-async def prepare_env(files: list[File]) -> Success | Error:
- sys.setrecursionlimit(400)
-
- cwd = Path.cwd()
- for file in files:
- (cwd / file['name']).write_text(file['content'])
-
- active: File | None = next((f for f in files if f['active']), None)
-
- dependencies: list[str] | None = None
- if active:
- python_code = active['content']
- dependencies = _find_pep723_dependencies(python_code)
- if dependencies is None:
- dependencies = await _find_import_dependencies(python_code)
-
- if dependencies:
- dependencies = _add_extra_dependencies(dependencies)
-
- with _micropip_logging() as logs_filename:
- try:
- await micropip.install(dependencies, keep_going=True)
- importlib.invalidate_caches()
- except Exception:
- with open(logs_filename) as f:
- logs = f.read()
- return Error(message=f'{logs} {traceback.format_exc()}')
-
- return Success(dependencies=dependencies)
-
-
-def dump_json(value: Any) -> str | None:
- from pydantic_core import to_json
-
- if value is None:
- return None
- if isinstance(value, str):
- return value
- else:
- return to_json(value, indent=2, fallback=_json_fallback).decode()
-
-
-def _json_fallback(value: Any) -> Any:
- tp: Any = type(value)
- module = tp.__module__
- if module == 'numpy':
- if tp.__name__ in {'ndarray', 'matrix'}:
- return value.tolist()
- else:
- return value.item()
- elif module == 'pyodide.ffi':
- return value.to_py()
- else:
- return repr(value)
-
-
-def _add_extra_dependencies(dependencies: list[str]) -> list[str]:
- """Add extra dependencies we know some packages need.
-
- Workaround for micropip not installing some required transitive dependencies.
- See https://github.com/pyodide/micropip/issues/204
-
- pygments seems to be required to get rich to work properly, ssl is required for FastAPI and HTTPX,
- pydantic_ai requires newest typing_extensions.
- """
- extras: list[str] = []
- for d in dependencies:
- if d.startswith(('logfire', 'rich')):
- extras.append('pygments')
- elif d.startswith(('fastapi', 'httpx', 'pydantic_ai')):
- extras.append('ssl')
-
- if d.startswith('pydantic_ai'):
- extras.append('typing_extensions>=4.12')
-
- if len(extras) == 3:
- break
-
- return dependencies + extras
-
-
-@contextmanager
-def _micropip_logging() -> Iterator[str]:
- from micropip import logging as micropip_logging
-
- micropip_logging.setup_logging()
- logger = logging.getLogger('micropip')
- logger.handlers.clear()
- logger.setLevel(logging.INFO)
-
- file_name = 'micropip.log'
- handler = logging.FileHandler(file_name)
- handler.setLevel(logging.INFO)
- handler.setFormatter(logging.Formatter('%(message)s'))
- logger.addHandler(handler)
- try:
- yield file_name
- finally:
- logger.removeHandler(handler)
-
-
-def _find_pep723_dependencies(code: str) -> list[str] | None:
- """Extract dependencies from a script with PEP 723 metadata."""
- metadata = _read_pep723_metadata(code)
- dependencies: list[str] | None = metadata.get('dependencies')
- if dependencies is None:
- return None
- else:
- assert isinstance(dependencies, list), 'dependencies must be a list'
- assert all(isinstance(dep, str) for dep in dependencies), 'dependencies must be a list of strings'
- return dependencies
-
-
-def _read_pep723_metadata(code: str) -> dict[str, Any]:
- """Read PEP 723 script metadata.
-
- Copied from https://packaging.python.org/en/latest/specifications/inline-script-metadata/#reference-implementation
- """
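- # Example of the kind of block this parses (the same form appears in the test cases removed below):
- #   # /// script
- #   # dependencies = ["pydantic", "email-validator"]
- #   # ///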
- name = 'script'
- magic_comment_regex = r'(?m)^# /// (?P<type>[a-zA-Z0-9-]+)$\s(?P<content>(^#(| .*)$\s)+)^# ///$'
- matches = list(filter(lambda m: m.group('type') == name, re.finditer(magic_comment_regex, code)))
- if len(matches) > 1:
- raise ValueError(f'Multiple {name} blocks found')
- elif len(matches) == 1:
- content = ''.join(
- line[2:] if line.startswith('# ') else line[1:]
- for line in matches[0].group('content').splitlines(keepends=True)
- )
- return tomllib.loads(content)
- else:
- return {}
-
-
-async def _find_import_dependencies(code: str) -> list[str] | None:
- """Find dependencies in imports."""
- try:
- imports: list[str] = find_imports(code)
- except SyntaxError:
- return None
- else:
- return list(_find_imports_to_install(imports))
-
-
-TO_PACKAGE_NAME: dict[str, str] = pyodide_js._api._import_name_to_package_name.to_py() # pyright: ignore[reportPrivateUsage]
-
-
-def _find_imports_to_install(imports: list[str]) -> Iterable[str]:
- """Given a list of module names being imported, return packages that are not installed."""
- for module in imports:
- try:
- importlib.import_module(module)
- except ModuleNotFoundError:
- if package_name := TO_PACKAGE_NAME.get(module):
- yield package_name
- elif '.' not in module:
- yield module
diff --git a/mcp-run-python/src/runCode.ts b/mcp-run-python/src/runCode.ts
deleted file mode 100644
index 691c3c860e..0000000000
--- a/mcp-run-python/src/runCode.ts
+++ /dev/null
@@ -1,171 +0,0 @@
-/* eslint @typescript-eslint/no-explicit-any: off */
-import { loadPyodide } from 'pyodide'
-import { preparePythonCode } from './prepareEnvCode.ts'
-import type { LoggingLevel } from '@modelcontextprotocol/sdk/types.js'
-
-export interface CodeFile {
- name: string
- content: string
- active: boolean
-}
-
-export async function runCode(
- files: CodeFile[],
- log: (level: LoggingLevel, data: string) => void,
-): Promise<RunSuccess | RunError> {
- // remove this workaround once we can upgrade to pyodide 0.27.7, where console.log is no longer used.
- const realConsoleLog = console.log
- // deno-lint-ignore no-explicit-any
- console.log = (...args: any[]) => log('debug', args.join(' '))
-
- const output: string[] = []
- const pyodide = await loadPyodide({
- stdout: (msg) => {
- log('info', msg)
- output.push(msg)
- },
- stderr: (msg) => {
- log('warning', msg)
- output.push(msg)
- },
- })
-
- // see https://github.com/pyodide/pyodide/discussions/5512
- const origLoadPackage = pyodide.loadPackage
- pyodide.loadPackage = (pkgs, options) =>
- origLoadPackage(pkgs, {
- // stop pyodide printing to stdout/stderr
- messageCallback: (msg: string) => log('debug', `loadPackage: ${msg}`),
- errorCallback: (msg: string) => {
- log('error', `loadPackage: ${msg}`)
- output.push(`install error: ${msg}`)
- },
- ...options,
- })
-
- await pyodide.loadPackage(['micropip', 'pydantic'])
- const sys = pyodide.pyimport('sys')
-
- const dirPath = '/tmp/mcp_run_python'
- sys.path.append(dirPath)
- const pathlib = pyodide.pyimport('pathlib')
- pathlib.Path(dirPath).mkdir()
- const moduleName = '_prepare_env'
-
- pathlib.Path(`${dirPath}/${moduleName}.py`).write_text(preparePythonCode)
-
- const preparePyEnv: PreparePyEnv = pyodide.pyimport(moduleName)
-
- const prepareStatus = await preparePyEnv.prepare_env(pyodide.toPy(files))
-
- let runResult: RunSuccess | RunError
- if (prepareStatus.kind == 'error') {
- runResult = {
- status: 'install-error',
- output,
- error: prepareStatus.message,
- }
- } else {
- const { dependencies } = prepareStatus
- const activeFile = files.find((f) => f.active)! || files[0]
- try {
- const rawValue = await pyodide.runPythonAsync(activeFile.content, {
- globals: pyodide.toPy({ __name__: '__main__' }),
- filename: activeFile.name,
- })
- runResult = {
- status: 'success',
- dependencies,
- output,
- returnValueJson: preparePyEnv.dump_json(rawValue),
- }
- } catch (err) {
- runResult = {
- status: 'run-error',
- dependencies,
- output,
- error: formatError(err),
- }
- }
- }
- sys.stdout.flush()
- sys.stderr.flush()
- console.log = realConsoleLog
- return runResult
-}
-
-interface RunSuccess {
- status: 'success'
- // we could record stdout and stderr separately, but I suspect simplicity is more important
- output: string[]
- dependencies: string[]
- returnValueJson: string | null
-}
-
-interface RunError {
- status: 'install-error' | 'run-error'
- output: string[]
- dependencies?: string[]
- error: string
-}
-
-export function asXml(runResult: RunSuccess | RunError): string {
- const xml = [`<status>${runResult.status}</status>`]
- if (runResult.dependencies?.length) {
- xml.push(
- `<dependencies>${JSON.stringify(runResult.dependencies)}</dependencies>`,
- )
- }
- if (runResult.output.length) {
- xml.push('<output>')
- xml.push(escapeClosing('output')(runResult.output.join('\n')))
- xml.push('</output>')
- }
- if (runResult.status == 'success') {
- if (runResult.returnValueJson) {
- xml.push('<return_value>')
- xml.push(escapeClosing('return_value')(runResult.returnValueJson))
- xml.push('</return_value>')
- }
- } else {
- xml.push('<error>')
- xml.push(escapeClosing('error')(runResult.error))
- xml.push('</error>')
- }
- return xml.join('\n')
-}
-
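-// Escape any occurrence of the given closing tag inside the wrapped string, so embedded text cannot prematurely close the surrounding XML element.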
-function escapeClosing(closingTag: string): (str: string) => string {
- const regex = new RegExp(`</?\\s*${closingTag}(?:.*?>)?`, 'gi')
- const onMatch = (match: string) => {
- return match.replace(/</g, '&lt;').replace(/>/g, '&gt;')
- }
- return (str) => str.replace(regex, onMatch)
-}
-
-// deno-lint-ignore no-explicit-any
-function formatError(err: any): string {
- let errStr = err.toString()
- errStr = errStr.replace(/^PythonError: +/, '')
- // remove frames from inside pyodide
- errStr = errStr.replace(
- / {2}File "\/lib\/python\d+\.zip\/_pyodide\/.*\n {4}.*\n(?: {4,}\^+\n)?/g,
- '',
- )
- return errStr
-}
-
-interface PrepareSuccess {
- kind: 'success'
- dependencies: string[]
-}
-interface PrepareError {
- kind: 'error'
- message: string
-}
-interface PreparePyEnv {
- prepare_env: (files: CodeFile[]) => Promise<PrepareSuccess | PrepareError>
- // deno-lint-ignore no-explicit-any
- dump_json: (value: any) => string | null
-}
diff --git a/mcp-run-python/stubs/pyodide/code.pyi b/mcp-run-python/stubs/pyodide/code.pyi
deleted file mode 100644
index 1e191b6e50..0000000000
--- a/mcp-run-python/stubs/pyodide/code.pyi
+++ /dev/null
@@ -1,2 +0,0 @@
-def find_imports(code: str) -> list[str]:
- ...
diff --git a/mcp-run-python/stubs/pyodide_js.pyi b/mcp-run-python/stubs/pyodide_js.pyi
deleted file mode 100644
index 31373aeab3..0000000000
--- a/mcp-run-python/stubs/pyodide_js.pyi
+++ /dev/null
@@ -1,9 +0,0 @@
-class _ToPy:
- def to_py(self) -> dict[str, str]:
- ...
-
-
-class _Api:
- _import_name_to_package_name: _ToPy
-
-_api: _Api
diff --git a/mcp-run-python/test_mcp_servers.py b/mcp-run-python/test_mcp_servers.py
deleted file mode 100644
index 3fd72927f1..0000000000
--- a/mcp-run-python/test_mcp_servers.py
+++ /dev/null
@@ -1,231 +0,0 @@
-from __future__ import annotations as _annotations
-
-import asyncio
-import re
-import subprocess
-from collections.abc import AsyncIterator
-from pathlib import Path
-from typing import TYPE_CHECKING
-
-import pytest
-from httpx import AsyncClient, HTTPError
-from inline_snapshot import snapshot
-from mcp import ClientSession, StdioServerParameters, types
-from mcp.client.sse import sse_client
-from mcp.client.stdio import stdio_client
-from mcp.client.streamable_http import streamablehttp_client
-
-if TYPE_CHECKING:
- from mcp import ClientSession
-
-pytestmark = pytest.mark.anyio
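-# Deno permission flags: -N allows network access; -R/-W allow read/write access to node_modules.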
-DENO_ARGS = [
- 'run',
- '-N',
- '-R=mcp-run-python/node_modules',
- '-W=mcp-run-python/node_modules',
- '--node-modules-dir=auto',
- 'mcp-run-python/src/main.ts',
-]
-
-
-@pytest.fixture
-def anyio_backend():
- return 'asyncio'
-
-
-@pytest.fixture(name='mcp_session', params=['stdio', 'sse', 'streamable_http'])
-async def fixture_mcp_session(request: pytest.FixtureRequest) -> AsyncIterator[ClientSession]:
- if request.param == 'stdio':
- server_params = StdioServerParameters(command='deno', args=[*DENO_ARGS, 'stdio'])
- async with stdio_client(server_params) as (read, write):
- async with ClientSession(read, write) as session:
- yield session
- elif request.param == 'streamable_http':
- port = 3101
- p = subprocess.Popen(['deno', *DENO_ARGS, 'streamable_http', f'--port={port}'])
- try:
- url = f'http://localhost:{port}/mcp'
-
- async with AsyncClient() as client:
- for _ in range(10):
- try:
- await client.get(url, timeout=0.01)
- except HTTPError:
- await asyncio.sleep(0.1)
- else:
- break
-
- async with streamablehttp_client(url) as (read_stream, write_stream, _):
- async with ClientSession(read_stream, write_stream) as session:
- yield session
-
- finally:
- p.terminate()
- exit_code = p.wait()
- if exit_code > 0:
- pytest.fail(f'Process exited with code {exit_code}')
-
- else:
- port = 3101
-
- p = subprocess.Popen(['deno', *DENO_ARGS, 'sse', f'--port={port}'])
- try:
- url = f'http://localhost:{port}'
- async with AsyncClient() as client:
- for _ in range(10):
- try:
- await client.get(url, timeout=0.01)
- except HTTPError:
- await asyncio.sleep(0.1)
- else:
- break
-
- async with sse_client(f'{url}/sse') as (read, write):
- async with ClientSession(read, write) as session:
- yield session
- finally:
- p.terminate()
- exit_code = p.wait()
- if exit_code > 0:
- pytest.fail(f'Process exited with code {exit_code}')
-
-
-async def test_list_tools(mcp_session: ClientSession) -> None:
- await mcp_session.initialize()
- tools = await mcp_session.list_tools()
- assert len(tools.tools) == 1
- tool = tools.tools[0]
- assert tool.name == 'run_python_code'
- assert tool.description
- assert tool.description.startswith('Tool to execute Python code and return stdout, stderr, and return value.')
- assert tool.inputSchema['properties'] == snapshot(
- {'python_code': {'type': 'string', 'description': 'Python code to run'}}
- )
-
-
-@pytest.mark.parametrize(
- 'code,expected_output',
- [
- pytest.param(
- [
- 'x = 4',
- "print(f'{x=}')",
- 'x',
- ],
- snapshot("""\
-<status>success</status>
-<output>
-x=4
-</output>
-<return_value>
-4
-</return_value>\
-"""),
- id='basic-code',
- ),
- pytest.param(
- [
- 'import numpy',
- 'numpy.array([1, 2, 3])',
- ],
- snapshot("""\
-<status>success</status>
-<dependencies>["numpy"]</dependencies>
-<return_value>
-[
- 1,
- 2,
- 3
-]
-</return_value>\
-"""),
- id='import-numpy',
- ),
- pytest.param(
- [
- '# /// script',
- '# dependencies = ["pydantic", "email-validator"]',
- '# ///',
- 'import pydantic',
- 'class Model(pydantic.BaseModel):',
- ' email: pydantic.EmailStr',
- "Model(email='hello@pydantic.dev')",
- ],
- snapshot("""\
-<status>success</status>
-<dependencies>["pydantic","email-validator"]</dependencies>
-<return_value>
-{
- "email": "hello@pydantic.dev"
-}
-</return_value>\
-"""),
- id='magic-comment-import',
- ),
- pytest.param(
- [
- 'print(unknown)',
- ],
- snapshot("""\
-<status>run-error</status>
-<error>
-Traceback (most recent call last):
- File "main.py", line 1, in <module>
- print(unknown)
- ^^^^^^^
-NameError: name 'unknown' is not defined
-
-</error>\
-"""),
- id='undefined-variable',
- ),
- ],
-)
-async def test_run_python_code(mcp_session: ClientSession, code: list[str], expected_output: str) -> None:
- await mcp_session.initialize()
- result = await mcp_session.call_tool('run_python_code', {'python_code': '\n'.join(code)})
- assert len(result.content) == 1
- content = result.content[0]
- assert isinstance(content, types.TextContent)
- assert content.text == expected_output
-
-
-async def test_install_run_python_code() -> None:
- node_modules = Path(__file__).parent / 'node_modules'
- if node_modules.exists():
- # shutil.rmtree can't delete node_modules :-(
- subprocess.run(['rm', '-r', node_modules], check=True)
-
- logs: list[str] = []
-
- async def logging_callback(params: types.LoggingMessageNotificationParams) -> None:
- logs.append(f'{params.level}: {params.data}')
-
- server_params = StdioServerParameters(command='deno', args=[*DENO_ARGS, 'stdio'])
- async with stdio_client(server_params) as (read, write):
- async with ClientSession(read, write, logging_callback=logging_callback) as mcp_session:
- await mcp_session.initialize()
- await mcp_session.set_logging_level('debug')
- result = await mcp_session.call_tool(
- 'run_python_code', {'python_code': 'import numpy\nnumpy.array([1, 2, 3])'}
- )
- assert len(result.content) == 1
- content = result.content[0]
- assert isinstance(content, types.TextContent)
- expected_output = """\
-<status>success</status>
-<dependencies>["numpy"]</dependencies>
-<return_value>
-[
- 1,
- 2,
- 3
-]
-</return_value>\
-"""
- assert content.text == expected_output
- assert len(logs) >= 18
- assert re.search(
- r"debug: Didn't find package numpy\S+?\.whl locally, attempting to load from", '\n'.join(logs)
- )
diff --git a/mkdocs.yml b/mkdocs.yml
index 22bb799edb..1d49e46be0 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -60,14 +60,13 @@ nav:
- Overview: graph.md
- Integrations:
- - Debugging & Monitoring with Pydantic Logfire: logfire.md
- - temporal.md
- - Agent-User Interaction (AG-UI): ag-ui.md
- - Agent2Agent (A2A): a2a.md
+ - Debugging & Monitoring with Pydantic Logfire: logfire.md
+ - temporal.md
+ - Agent-User Interaction (AG-UI): ag-ui.md
+ - Agent2Agent (A2A): a2a.md
- Related Packages:
- Clai: cli.md
- - MCP Run Python: mcp/run-python.md
- Examples:
- Setup: examples/setup.md
@@ -308,9 +307,9 @@ plugins:
- examples/*.md
- redirects:
redirect_maps:
- 'examples/index.md': 'examples/setup.md'
- 'mcp/index.md': 'mcp/overview.md'
- 'models/index.md': 'models/overview.md'
+ "examples/index.md": "examples/setup.md"
+ "mcp/index.md": "mcp/overview.md"
+ "models/index.md": "models/overview.md"
hooks:
- "docs/.hooks/main.py"
diff --git a/pydantic_ai_slim/pydantic_ai/mcp.py b/pydantic_ai_slim/pydantic_ai/mcp.py
index 168405723b..c2304585e4 100644
--- a/pydantic_ai_slim/pydantic_ai/mcp.py
+++ b/pydantic_ai_slim/pydantic_ai/mcp.py
@@ -400,16 +400,7 @@ class MCPServerStdio(MCPServer):
from pydantic_ai.mcp import MCPServerStdio
server = MCPServerStdio( # (1)!
- 'deno',
- args=[
- 'run',
- '-N',
- '-R=node_modules',
- '-W=node_modules',
- '--node-modules-dir=auto',
- 'jsr:@pydantic/mcp-run-python',
- 'stdio',
- ]
+ 'uv', args=['run', 'mcp-run-python', 'stdio'], timeout=10
)
agent = Agent('openai:gpt-4o', toolsets=[server])
@@ -418,7 +409,7 @@ async def main():
...
```
- 1. See [MCP Run Python](../mcp/run-python.md) for more information.
+ 1. See [MCP Run Python](https://github.com/pydantic/mcp-run-python) for more information.
2. This will start the server as a subprocess and connect to it.
"""
@@ -731,16 +722,15 @@ class MCPServerSSE(_MCPServerHTTP):
from pydantic_ai import Agent
from pydantic_ai.mcp import MCPServerSSE
- server = MCPServerSSE('http://localhost:3001/sse') # (1)!
+ server = MCPServerSSE('http://localhost:3001/sse')
agent = Agent('openai:gpt-4o', toolsets=[server])
async def main():
- async with agent: # (2)!
+ async with agent: # (1)!
...
```
- 1. E.g. you might be connecting to a server run with [`mcp-run-python`](../mcp/run-python.md).
- 2. This will connect to a server running on `localhost:3001`.
+ 1. This will connect to a server running on `localhost:3001`.
"""
@property
@@ -764,7 +754,7 @@ class MCPServerHTTP(MCPServerSSE):
from pydantic_ai import Agent
from pydantic_ai.mcp import MCPServerHTTP
- server = MCPServerHTTP('http://localhost:3001/sse') # (1)!
+ server = MCPServerHTTP('http://localhost:3001/sse')
agent = Agent('openai:gpt-4o', toolsets=[server])
async def main():
@@ -772,8 +762,7 @@ async def main():
...
```
- 1. E.g. you might be connecting to a server run with [`mcp-run-python`](../mcp/run-python.md).
- 2. This will connect to a server running on `localhost:3001`.
+ 1. This will connect to a server running on `localhost:3001`.
"""
diff --git a/pydantic_ai_slim/pyproject.toml b/pydantic_ai_slim/pyproject.toml
index c35e8df6b9..bf35813b45 100644
--- a/pydantic_ai_slim/pyproject.toml
+++ b/pydantic_ai_slim/pyproject.toml
@@ -81,9 +81,14 @@ huggingface = ["huggingface-hub[inference]>=0.33.5"]
duckduckgo = ["ddgs>=9.0.0"]
tavily = ["tavily-python>=0.5.0"]
# CLI
-cli = ["rich>=13", "prompt-toolkit>=3", "argcomplete>=3.5.0", "pyperclip>=1.9.0"]
+cli = [
+ "rich>=13",
+ "prompt-toolkit>=3",
+ "argcomplete>=3.5.0",
+ "pyperclip>=1.9.0",
+]
# MCP
-mcp = ["mcp>=1.12.3; python_version >= '3.10'"]
+mcp = ["mcp>=1.12.3"]
# Evals
evals = ["pydantic-evals=={{ version }}"]
# A2A
diff --git a/pyproject.toml b/pyproject.toml
index aafbbd710b..1ae5ad5965 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -74,7 +74,6 @@ members = [
"pydantic_ai_slim",
"pydantic_evals",
"pydantic_graph",
- "mcp-run-python",
"clai",
"examples",
]
@@ -103,6 +102,7 @@ dev = [
# Needed for PyCharm users
"pip>=25.2",
"genai-prices>=0.0.22",
+ "mcp-run-python>=0.0.20",
]
lint = ["mypy>=1.11.2", "pyright>=1.1.390", "ruff>=0.6.9"]
docs = [
@@ -131,7 +131,6 @@ include = [
"pydantic_ai_slim/**/*.py",
"pydantic_evals/**/*.py",
"pydantic_graph/**/*.py",
- "mcp-run-python/**/*.py",
"examples/**/*.py",
"clai/**/*.py",
"tests/**/*.py",
@@ -177,7 +176,6 @@ docstring-code-format = false
quote-style = "single"
[tool.ruff.lint.per-file-ignores]
-"mcp-run-python/**/*.py" = ["D", "TID251"]
"examples/**/*.py" = ["D101", "D103"]
"tests/**/*.py" = ["D"]
"docs/**/*.py" = ["D"]
@@ -193,7 +191,6 @@ include = [
"pydantic_ai_slim",
"pydantic_evals",
"pydantic_graph",
- "mcp-run-python",
"tests",
"examples",
"clai",
@@ -207,10 +204,8 @@ executionEnvironments = [
]
exclude = [
"examples/pydantic_ai_examples/weather_agent_gradio.py",
- "mcp-run-python/node_modules",
- "pydantic_ai_slim/pydantic_ai/ext/aci.py", # aci-sdk is too niche to be added as an (optional) dependency
+ "pydantic_ai_slim/pydantic_ai/ext/aci.py", # aci-sdk is too niche to be added as an (optional) dependency
]
-extraPaths = ["mcp-run-python/stubs"]
[tool.mypy]
files = "tests/typed_agent.py,tests/typed_graph.py"
@@ -267,7 +262,7 @@ disable_warnings = ["include-ignored"]
source = [
".",
"/home/runner/work/pydantic-ai/pydantic-ai",
- "/System/Volumes/Data/home/runner/work/pydantic-ai/pydantic-ai"
+ "/System/Volumes/Data/home/runner/work/pydantic-ai/pydantic-ai",
]
# https://coverage.readthedocs.io/en/latest/config.html#report
diff --git a/tests/test_examples.py b/tests/test_examples.py
index b52d70f3ce..f538435cc8 100644
--- a/tests/test_examples.py
+++ b/tests/test_examples.py
@@ -305,6 +305,7 @@ async def call_tool(
'Use the web to get the current time.': "In San Francisco, it's 8:21:41 pm PDT on Wednesday, August 6, 2025.",
'Give me a sentence with the biggest news in AI this week.': 'Scientists have developed a universal AI detector that can identify deepfake videos.',
'How many days between 2000-01-01 and 2025-03-18?': 'There are 9,208 days between January 1, 2000, and March 18, 2025.',
+ 'What is 7 plus 5?': 'The answer is 12.',
'What is the weather like in West London and in Wiltshire?': (
'The weather in West London is raining, while in Wiltshire it is sunny.'
),
diff --git a/uv.lock b/uv.lock
index ed8de81594..c2ef3431c9 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1,5 +1,5 @@
version = 1
-revision = 3
+revision = 2
requires-python = ">=3.10"
resolution-markers = [
"python_full_version >= '3.13' and platform_python_implementation == 'PyPy'",
@@ -15,7 +15,6 @@ resolution-markers = [
[manifest]
members = [
"clai",
- "mcp-run-python",
"pydantic-ai",
"pydantic-ai-examples",
"pydantic-ai-slim",
@@ -1800,7 +1799,7 @@ wheels = [
[[package]]
name = "mcp"
-version = "1.12.3"
+version = "1.13.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
@@ -1815,9 +1814,9 @@ dependencies = [
{ name = "starlette" },
{ name = "uvicorn", marker = "sys_platform != 'emscripten'" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/4d/19/9955e2df5384ff5dd25d38f8e88aaf89d2d3d9d39f27e7383eaf0b293836/mcp-1.12.3.tar.gz", hash = "sha256:ab2e05f5e5c13e1dc90a4a9ef23ac500a6121362a564447855ef0ab643a99fed", size = 427203, upload-time = "2025-07-31T18:36:36.795Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/66/3c/82c400c2d50afdac4fbefb5b4031fd327e2ad1f23ccef8eee13c5909aa48/mcp-1.13.1.tar.gz", hash = "sha256:165306a8fd7991dc80334edd2de07798175a56461043b7ae907b279794a834c5", size = 438198, upload-time = "2025-08-22T09:22:16.061Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/8f/8b/0be74e3308a486f1d127f3f6767de5f9f76454c9b4183210c61cc50999b6/mcp-1.12.3-py3-none-any.whl", hash = "sha256:5483345bf39033b858920a5b6348a303acacf45b23936972160ff152107b850e", size = 158810, upload-time = "2025-07-31T18:36:34.915Z" },
+ { url = "https://files.pythonhosted.org/packages/19/3f/d085c7f49ade6d273b185d61ec9405e672b6433f710ea64a90135a8dd445/mcp-1.13.1-py3-none-any.whl", hash = "sha256:c314e7c8bd477a23ba3ef472ee5a32880316c42d03e06dcfa31a1cc7a73b65df", size = 161494, upload-time = "2025-08-22T09:22:14.705Z" },
]
[package.optional-dependencies]
@@ -1828,33 +1827,14 @@ cli = [
[[package]]
name = "mcp-run-python"
-version = "0.0.1"
-source = { virtual = "mcp-run-python" }
-
-[package.dev-dependencies]
-dev = [
- { name = "anyio" },
- { name = "dirty-equals" },
- { name = "httpx" },
- { name = "inline-snapshot" },
+version = "0.0.20"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
{ name = "mcp" },
- { name = "micropip", marker = "python_full_version >= '3.12'" },
- { name = "pytest" },
- { name = "pytest-pretty" },
]
-
-[package.metadata]
-
-[package.metadata.requires-dev]
-dev = [
- { name = "anyio", specifier = ">=4.5.0" },
- { name = "dirty-equals", specifier = ">=0.9.0" },
- { name = "httpx", specifier = ">=0.28.1" },
- { name = "inline-snapshot", specifier = ">=0.19.3" },
- { name = "mcp", marker = "python_full_version >= '3.10'", specifier = ">=1.4.1" },
- { name = "micropip", marker = "python_full_version >= '3.12'", specifier = ">=0.9.0" },
- { name = "pytest", specifier = ">=8.3.3" },
- { name = "pytest-pretty", specifier = ">=1.2.0" },
+sdist = { url = "https://files.pythonhosted.org/packages/85/47/1b279d51a71aa475215376024d14c7b3c0deb64e604b6178913cdcfe77f4/mcp_run_python-0.0.20.tar.gz", hash = "sha256:5988d293b1130d56624c35b927b95596e92b23b77e14ca0f1fd320f33fef10de", size = 23232, upload-time = "2025-09-02T21:19:39.148Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/4e/09/3d834a7df11f70481f99f871a077d129ac309939eb2982c1912b17645986/mcp_run_python-0.0.20-py3-none-any.whl", hash = "sha256:2c62759add24626d9e25abd953342ffe39ff9a684803ca9de03c8c025aae5e83", size = 26042, upload-time = "2025-09-02T21:19:38.197Z" },
]
[[package]]
@@ -1888,15 +1868,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/19/04f9b178c2d8a15b076c8b5140708fa6ffc5601fb6f1e975537072df5b2a/mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307", size = 6354, upload-time = "2021-02-05T18:55:29.583Z" },
]
-[[package]]
-name = "micropip"
-version = "0.9.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/3d/6f/aeef5f7696480704c133206511ff785dd86b76b6eba77391d14de162c1ab/micropip-0.9.0.tar.gz", hash = "sha256:d72c3224537a14e5d9d02f29e945bf7cde02404ec744291e8f6a310cedf60b66", size = 1783814, upload-time = "2025-02-01T15:55:25.23Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/27/6d/195810e3e73e5f351dc6082cada41bb4d5b0746a6804155ba6bae4304612/micropip-0.9.0-py3-none-any.whl", hash = "sha256:babcf68c1849e229aa887564bc5f2de50273c3212e73c9aff8e3e6e80f2b6a23", size = 114896, upload-time = "2025-02-01T15:55:23.436Z" },
-]
-
[[package]]
name = "mistralai"
version = "1.9.2"
@@ -3009,6 +2980,7 @@ dev = [
{ name = "duckduckgo-search" },
{ name = "genai-prices" },
{ name = "inline-snapshot" },
+ { name = "mcp-run-python" },
{ name = "pip" },
{ name = "pytest" },
{ name = "pytest-examples" },
@@ -3059,6 +3031,7 @@ dev = [
{ name = "duckduckgo-search", specifier = ">=7.0.0" },
{ name = "genai-prices", specifier = ">=0.0.22" },
{ name = "inline-snapshot", specifier = ">=0.19.3" },
+ { name = "mcp-run-python", specifier = ">=0.0.20" },
{ name = "pip", specifier = ">=25.2" },
{ name = "pytest", specifier = ">=8.3.3" },
{ name = "pytest-examples", specifier = ">=0.0.18" },
@@ -3227,7 +3200,7 @@ requires-dist = [
{ name = "httpx", specifier = ">=0.27" },
{ name = "huggingface-hub", extras = ["inference"], marker = "extra == 'huggingface'", specifier = ">=0.33.5" },
{ name = "logfire", extras = ["httpx"], marker = "extra == 'logfire'", specifier = ">=3.14.1" },
- { name = "mcp", marker = "python_full_version >= '3.10' and extra == 'mcp'", specifier = ">=1.12.3" },
+ { name = "mcp", marker = "extra == 'mcp'", specifier = ">=1.12.3" },
{ name = "mistralai", marker = "extra == 'mistral'", specifier = ">=1.9.2" },
{ name = "openai", marker = "extra == 'openai'", specifier = ">=1.99.9" },
{ name = "opentelemetry-api", specifier = ">=1.28.0" },