diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0f29afeaff..e761bb6ba7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -30,7 +30,7 @@ jobs: cache-suffix: lint - name: Install dependencies - run: uv sync --all-extras --all-packages --group lint + run: uv sync --all-extras --no-extra outlines-vllm-offline --all-packages --group lint - uses: pre-commit/action@v3.0.0 with: @@ -113,7 +113,7 @@ jobs: - name: standard command: "" - name: all-extras - command: "--all-extras" + command: "--all-extras --no-extra outlines-vllm-offline" env: CI: true COVERAGE_PROCESS_START: ./pyproject.toml @@ -194,7 +194,7 @@ jobs: - run: unset UV_FROZEN - - run: uv run --all-extras --resolution lowest-direct coverage run -m pytest --durations=100 -n auto --dist=loadgroup + - run: uv run --all-extras --no-extra outlines-vllm-offline --resolution lowest-direct coverage run -m pytest --durations=100 -n auto --dist=loadgroup env: COVERAGE_FILE: .coverage/.coverage.${{matrix.python-version}}-lowest-versions @@ -232,7 +232,7 @@ jobs: restore-keys: | hf-${{ runner.os }}- - - run: uv run --all-extras python tests/import_examples.py + - run: uv run --all-extras --no-extra outlines-vllm-offline python tests/import_examples.py coverage: runs-on: ubuntu-latest diff --git a/.gitignore b/.gitignore index dbdfd83ee0..71791d4aa3 100644 --- a/.gitignore +++ b/.gitignore @@ -7,6 +7,7 @@ __pycache__ /scratch/ /.coverage env*/ +!**/environments/ /TODO.md /postgres-data/ .DS_Store diff --git a/Makefile b/Makefile index 067ad11dec..51fa6627c8 100644 --- a/Makefile +++ b/Makefile @@ -10,19 +10,19 @@ .PHONY: install install: .uv .pre-commit ## Install the package, dependencies, and pre-commit for local development - uv sync --frozen --all-extras --all-packages --group lint --group docs + uv sync --frozen --all-extras --no-extra outlines-vllm-offline --all-packages --group lint --group docs pre-commit install --install-hooks .PHONY: install-all-python 
install-all-python: ## Install and synchronize an interpreter for every python version - UV_PROJECT_ENVIRONMENT=.venv310 uv sync --python 3.10 --frozen --all-extras --all-packages --group lint --group docs - UV_PROJECT_ENVIRONMENT=.venv311 uv sync --python 3.11 --frozen --all-extras --all-packages --group lint --group docs - UV_PROJECT_ENVIRONMENT=.venv312 uv sync --python 3.12 --frozen --all-extras --all-packages --group lint --group docs - UV_PROJECT_ENVIRONMENT=.venv313 uv sync --python 3.13 --frozen --all-extras --all-packages --group lint --group docs + UV_PROJECT_ENVIRONMENT=.venv310 uv sync --python 3.10 --frozen --all-extras --no-extra outlines-vllm-offline --all-packages --group lint --group docs + UV_PROJECT_ENVIRONMENT=.venv311 uv sync --python 3.11 --frozen --all-extras --no-extra outlines-vllm-offline --all-packages --group lint --group docs + UV_PROJECT_ENVIRONMENT=.venv312 uv sync --python 3.12 --frozen --all-extras --no-extra outlines-vllm-offline --all-packages --group lint --group docs + UV_PROJECT_ENVIRONMENT=.venv313 uv sync --python 3.13 --frozen --all-extras --no-extra outlines-vllm-offline --all-packages --group lint --group docs .PHONY: sync sync: .uv ## Update local packages and uv.lock - uv sync --all-extras --all-packages --group lint --group docs + uv sync --all-extras --no-extra outlines-vllm-offline --all-packages --group lint --group docs .PHONY: format format: ## Format the code @@ -57,10 +57,10 @@ test: ## Run tests without coverage (fast, for local dev) .PHONY: test-all-python test-all-python: ## Run tests on Python 3.10 to 3.13 - COLUMNS=150 UV_PROJECT_ENVIRONMENT=.venv310 uv run --python 3.10 --all-extras --all-packages coverage run -p -m pytest - COLUMNS=150 UV_PROJECT_ENVIRONMENT=.venv311 uv run --python 3.11 --all-extras --all-packages coverage run -p -m pytest - COLUMNS=150 UV_PROJECT_ENVIRONMENT=.venv312 uv run --python 3.12 --all-extras --all-packages coverage run -p -m pytest - COLUMNS=150 UV_PROJECT_ENVIRONMENT=.venv313 
uv run --python 3.13 --all-extras --all-packages coverage run -p -m pytest + COLUMNS=150 UV_PROJECT_ENVIRONMENT=.venv310 uv run --python 3.10 --all-extras --no-extra outlines-vllm-offline --all-packages coverage run -p -m pytest + COLUMNS=150 UV_PROJECT_ENVIRONMENT=.venv311 uv run --python 3.11 --all-extras --no-extra outlines-vllm-offline --all-packages coverage run -p -m pytest + COLUMNS=150 UV_PROJECT_ENVIRONMENT=.venv312 uv run --python 3.12 --all-extras --no-extra outlines-vllm-offline --all-packages coverage run -p -m pytest + COLUMNS=150 UV_PROJECT_ENVIRONMENT=.venv313 uv run --python 3.13 --all-extras --no-extra outlines-vllm-offline --all-packages coverage run -p -m pytest @uv run coverage combine @uv run coverage report diff --git a/docs/api/environments.md b/docs/api/environments.md new file mode 100644 index 0000000000..3cc75090ae --- /dev/null +++ b/docs/api/environments.md @@ -0,0 +1,31 @@ +# `pydantic_ai.environments` + +::: pydantic_ai.environments + options: + members: + - EnvToolName + - ExecutionEnvironment + - ExecutionEnvironmentToolset + - ExecutionProcess + - ExecutionResult + +## `pydantic_ai.environments.local` + +::: pydantic_ai.environments.local + options: + members: + - LocalEnvironment + +## `pydantic_ai.environments.docker` + +::: pydantic_ai.environments.docker + options: + members: + - DockerEnvironment + +## `pydantic_ai.environments.memory` + +::: pydantic_ai.environments.memory + options: + members: + - MemoryEnvironment diff --git a/docs/environments.md b/docs/environments.md new file mode 100644 index 0000000000..52310093db --- /dev/null +++ b/docs/environments.md @@ -0,0 +1,346 @@ +# Execution Environments & Sandboxes + +Pydantic AI provides [`ExecutionEnvironment`][pydantic_ai.environments.ExecutionEnvironment] — an abstraction for environments where agents can execute commands, read/write files, and search the filesystem — along with [`ExecutionEnvironmentToolset`][pydantic_ai.environments.ExecutionEnvironmentToolset], a 
ready-made [toolset](toolsets.md) that exposes these capabilities as tools. + +This is the foundation for building coding agents, data analysis bots, and other agents that need to interact with a shell and filesystem. + +## Quick Start + +```python {title="environments_quickstart.py"} +from pydantic_ai import Agent +from pydantic_ai.environments import ExecutionEnvironmentToolset +from pydantic_ai.environments.local import LocalEnvironment + +env = LocalEnvironment(root_dir='/tmp/workspace') +toolset = ExecutionEnvironmentToolset(env) + +agent = Agent('openai:gpt-5.2', toolsets=[toolset]) + +async def main(): + async with env: + result = await agent.run('Create a Python script that prints the first 10 Fibonacci numbers, then run it.') + print(result.output) + #> Done! The first 10 Fibonacci numbers are: 0, 1, 1, 2, 3, 5, 8, 13, 21, 34 +``` + +## Environments + +An [`ExecutionEnvironment`][pydantic_ai.environments.ExecutionEnvironment] defines where and how commands run. Three implementations are included: + +| Environment | Isolation | Use case | +|---|---|---| +| [`LocalEnvironment`][pydantic_ai.environments.local.LocalEnvironment] | None — runs on host | Development, testing, trusted agents | +| [`DockerEnvironment`][pydantic_ai.environments.docker.DockerEnvironment] | Container-level | Production, untrusted code | +| [`MemoryEnvironment`][pydantic_ai.environments.memory.MemoryEnvironment] | In-memory (no filesystem) | Unit testing | + +All environments are async context managers. 
Enter the environment before running the agent, and exit it to clean up: + +```python {title="environments_lifecycle.py"} +from pydantic_ai.environments.docker import DockerEnvironment + +env = DockerEnvironment(image='python:3.12-slim') + +async def main(): + async with env: + result = await env.shell('python -c "print(42)"') + print(result.output) + """ + 42 + """ + #> +``` + +### LocalEnvironment + +[`LocalEnvironment`][pydantic_ai.environments.local.LocalEnvironment] runs commands as local subprocesses within a specified root directory. It provides no isolation — use it for development, testing, and trusted agents. + +```python {title="environments_local.py"} +from pydantic_ai.environments.local import LocalEnvironment + +env = LocalEnvironment( + root_dir='/tmp/workspace', + env_vars={'PYTHONPATH': '/tmp/workspace/lib'}, + inherit_env=True, # inherit host environment variables (default) +) +``` + +File operations (read, write, edit) are confined to the root directory — path traversal attempts raise `PermissionError`. + +!!! info "Environment variable inheritance" + By default, `LocalEnvironment` inherits the host's environment variables. Set `inherit_env=False` for a clean environment where only explicitly provided `env_vars` (and per-call `env` overrides) are available. This is useful for reproducibility and testing. + +### DockerEnvironment + +[`DockerEnvironment`][pydantic_ai.environments.docker.DockerEnvironment] runs commands inside a Docker container with configurable resource limits, security options, and network access. 
+ +Requires the `docker` package: `pip install pydantic-ai-slim[docker-environment]` + +```python {title="environments_docker.py"} +from pydantic_ai.environments.docker import DockerEnvironment + +env = DockerEnvironment( + image='my-sandbox:latest', + env_vars={'MPLBACKEND': 'Agg'}, + memory_limit='512m', + cpu_limit=1.0, + network_disabled=True, +) +``` + +#### Building a custom Docker image + +`DockerEnvironment` runs whatever image you give it — it doesn't install packages at startup. Pre-build a custom image with any libraries your agent needs, so containers start fast and reproducibly. + +**Example Dockerfile** — a Python data-science sandbox: + +```dockerfile {title="Dockerfile" test="skip" lint="skip"} +FROM python:3.12-slim + +# Install OS-level tools the agent might use (optional) +RUN apt-get update \ + && apt-get install -y --no-install-recommends git curl jq \ + && rm -rf /var/lib/apt/lists/* + +# Install Python packages +RUN pip install --no-cache-dir numpy pandas matplotlib requests + +WORKDIR /workspace +``` + +Build and tag the image: + +```bash +docker build -t my-sandbox:latest . +``` + +Then pass the tag to `DockerEnvironment`: + +```python {title="environments_docker_custom.py"} +from pydantic_ai.environments.docker import DockerEnvironment + +env = DockerEnvironment(image='my-sandbox:latest') +``` + +!!! tip "Tips for custom images" + + - **Start from a slim base** (`python:3.12-slim`, `node:22-slim`, etc.) to keep image size and attack surface small. + - **Pin package versions** (e.g. `numpy==2.2.3`) for reproducible builds. + - **Use `--no-cache-dir`** with pip to avoid bloating the image with cached wheels. + - **Build once, run many times.** The image is pulled from the local Docker cache on each `DockerEnvironment` startup — no rebuild needed. + - **Use a registry** for team or CI workflows: push your image to Docker Hub, GitHub Container Registry, or a private registry, then reference it by its full name (e.g. 
`ghcr.io/myorg/my-sandbox:latest`). + - **For Node.js** or other runtimes, adjust the base image and install command accordingly: + + ```dockerfile {test="skip" lint="skip"} + FROM node:22-slim + RUN npm install -g typescript ts-node express + WORKDIR /workspace + ``` + +For running untrusted code, you can harden the container with Linux security options: + +```python {title="environments_docker_hardened.py"} +from pydantic_ai.environments.docker import DockerEnvironment + +env = DockerEnvironment.hardened(image='python:3.12-slim') +``` + +This uses the [`hardened()`][pydantic_ai.environments.docker.DockerEnvironment.hardened] convenience constructor, which sets sensible security defaults: network disabled, read-only root filesystem, all capabilities dropped, no privilege escalation, runs as `nobody`, uses an init process, and limits PIDs, memory, and CPU. You can customize the resource limits: + +```python {title="environments_docker_hardened_custom.py"} +from pydantic_ai.environments.docker import DockerEnvironment + +env = DockerEnvironment.hardened( + image='my-sandbox:latest', + memory_limit='1g', + cpu_limit=2.0, + pids_limit=512, +) +``` + +## ExecutionEnvironmentToolset + +[`ExecutionEnvironmentToolset`][pydantic_ai.environments.ExecutionEnvironmentToolset] wraps an environment and exposes coding-agent-style tools that models are well-trained on (matching tools that popular coding agents expose): + +| Tool | Description | +|---|---| +| `shell` | Execute shell commands | +| `read_file` | Read files with line numbers (renders images for multimodal models) | +| `write_file` | Create or overwrite files | +| `edit_file` | Edit files by exact string replacement | + +Tools are dynamically registered based on the environment's capabilities. 
You can selectively include or exclude capabilities: + +```python {title="environments_selective_tools.py"} +from pydantic_ai.environments import ExecutionEnvironmentToolset +from pydantic_ai.environments.memory import MemoryEnvironment + +# Only file tools — no shell or search +toolset = ExecutionEnvironmentToolset( + MemoryEnvironment(), + include=['read_file', 'write_file', 'edit_file'], +) +``` + +### Using with an Agent + +The toolset manages the environment lifecycle when used as a context manager: + +```python {title="environments_agent.py"} +from pydantic_ai import Agent +from pydantic_ai.environments import ExecutionEnvironmentToolset +from pydantic_ai.environments.docker import DockerEnvironment + +env = DockerEnvironment(image='python:3.12-slim') +toolset = ExecutionEnvironmentToolset(env) + +agent = Agent('openai:gpt-5.2', toolsets=[toolset]) + +async def main(): + async with toolset: # starts the Docker container + result = await agent.run('Fetch https://httpbin.org/get and print the response') + print(result.output) + """ + Successfully fetched the URL. The response contains request metadata including headers and origin IP. + """ + # container cleaned up automatically +``` + +!!! tip "Pre-starting the environment" + Using `async with toolset:` starts the environment once and keeps it alive across all agent runs. Without it, the environment is started and stopped on each `agent.run()` call — for Docker, that means creating and destroying a container every time. Pre-start the toolset for better performance when running the agent multiple times. + +!!! note "Shared environment" + When you pass an environment directly, all concurrent `agent.run()` calls share the same environment instance (same container, filesystem, and processes). For isolated concurrent runs, use `environment_factory` — see [Concurrent Runs](#concurrent-runs) below. 
+ +### Environment Overrides + +You can swap the backing environment at runtime using [`use_environment()`][pydantic_ai.environments.ExecutionEnvironmentToolset.use_environment]: + +```python {title="environments_override.py"} +from pydantic_ai import Agent +from pydantic_ai.environments import ExecutionEnvironmentToolset +from pydantic_ai.environments.docker import DockerEnvironment +from pydantic_ai.environments.local import LocalEnvironment + +toolset = ExecutionEnvironmentToolset(LocalEnvironment('/tmp/dev')) + +agent = Agent('openai:gpt-5.2', toolsets=[toolset]) + +async def main(): + # Default: local environment + async with LocalEnvironment('/tmp/dev') as local_env: + with toolset.use_environment(local_env): + await agent.run('echo "running locally"') + + # Override: Docker environment for untrusted input + async with DockerEnvironment() as docker_env: + with toolset.use_environment(docker_env): + await agent.run('echo "running in Docker"') +``` + +### Concurrent Runs + +When multiple `agent.run()` calls execute concurrently (e.g. via `asyncio.gather`), a shared environment means they all operate on the same filesystem and processes, which can cause interference. 
Use `environment_factory` to create a fresh, isolated environment for each run: + +```python {title="environments_concurrent.py"} +import asyncio + +from pydantic_ai import Agent +from pydantic_ai.environments import ExecutionEnvironmentToolset +from pydantic_ai.environments.docker import DockerEnvironment + +# Each concurrent run gets its own container +toolset = ExecutionEnvironmentToolset( + environment_factory=lambda: DockerEnvironment(image='python:3.12-slim') +) + +agent = Agent('openai:gpt-5.2', toolsets=[toolset]) + +async def main(): + # Each agent.run() enters its own `async with toolset:`, creating a separate container + await asyncio.gather( + agent.run('task A'), + agent.run('task B'), + ) +``` + +The factory is called once per `async with toolset:` entry, and the created environment is automatically cleaned up on exit. + +## Per-Call Environment Variables + +All environments support per-call environment variables via the `env` parameter on [`shell()`][pydantic_ai.environments.ExecutionEnvironment.shell] and [`create_process()`][pydantic_ai.environments.ExecutionEnvironment.create_process]. 
These are merged on top of any baseline `env_vars`: + +```python {title="environments_env_vars.py" test="skip"} +from pydantic_ai.environments.local import LocalEnvironment + +environment = LocalEnvironment(env_vars={'BASE_URL': 'https://api.example.com'}) + +async def main(): + async with environment: + # Uses BASE_URL from baseline + API_KEY from per-call + result = await environment.shell( + 'curl -H "Authorization: Bearer $API_KEY" $BASE_URL/data', + env={'API_KEY': 'sk-test-123'}, + ) + print(result.output) +``` + +## Interactive Processes + +For long-running or interactive workloads, use [`create_process()`][pydantic_ai.environments.ExecutionEnvironment.create_process] to get an [`ExecutionProcess`][pydantic_ai.environments.ExecutionProcess] with bidirectional streaming I/O: + +```python {title="environments_process.py" test="skip"} +from pydantic_ai.environments.local import LocalEnvironment + +env = LocalEnvironment() + +async def main(): + async with env: + async with await env.create_process('python3 -u worker.py') as proc: + await proc.send(b'{"task": "analyze"}\n') + response = await proc.recv(timeout=10.0) + print(response.decode()) +``` + +## Execution Model + +Each call to `shell()` or `create_process()` starts a fresh process. Shell state (like `cd`, shell variables) does not persist between calls. This is the same model used by other coding agents like Claude Code and Codex. + +To run commands in a specific directory, chain them: + +```python {title="environments_chaining.py" test="skip" lint="skip"} +result = await env.shell('cd /some/path && python script.py') +``` + +Filesystem changes (created files, installed packages) persist for the lifetime of the environment. + +## Building a Custom Environment + +You can implement [`ExecutionEnvironment`][pydantic_ai.environments.ExecutionEnvironment] to integrate with any execution backend. The only abstract member is `capabilities`; override the methods that match your declared capabilities. 
Override [`create_process()`][pydantic_ai.environments.ExecutionEnvironment.create_process] if you need interactive process support. + +```python {title="environments_custom.py" test="skip" lint="skip"} +from pydantic_ai.environments import EnvToolName, ExecutionEnvironment, ExecutionProcess, ExecutionResult + +class MyCloudEnvironment(ExecutionEnvironment): + @property + def capabilities(self) -> frozenset[EnvToolName]: + return frozenset({'shell', 'read_file', 'write_file', 'edit_file'}) + + async def shell( + self, command: str, *, timeout: float | None = 120, env: dict[str, str] | None = None + ) -> ExecutionResult: + # Run a command in your cloud environment + ... + + async def read_file( + self, path: str, *, offset: int = 0, limit: int = 2000 + ) -> str | bytes: + ... + + async def write_file(self, path: str, content: str | bytes) -> None: + ... + + async def replace_str( + self, path: str, old: str, new: str, *, replace_all: bool = False + ) -> int: + ... +``` diff --git a/docs/install.md b/docs/install.md index 2eced2cf2c..e2ba5ec30f 100644 --- a/docs/install.md +++ b/docs/install.md @@ -67,6 +67,7 @@ pip/uv-add "pydantic-ai-slim[openai]" * `ag-ui` - installs [AG-UI Event Stream Protocol](ui/ag-ui.md) dependencies `ag-ui-protocol` [PyPI ↗](https://pypi.org/project/ag-ui-protocol){:target="_blank"} and `starlette` [PyPI ↗](https://pypi.org/project/starlette){:target="_blank"} * `dbos` - installs [DBOS Durable Execution](durable_execution/dbos.md) dependency `dbos` [PyPI ↗](https://pypi.org/project/dbos){:target="_blank"} * `prefect` - installs [Prefect Durable Execution](durable_execution/prefect.md) dependency `prefect` [PyPI ↗](https://pypi.org/project/prefect){:target="_blank"} +* `docker-environment` - installs [Docker Environment](environments.md#dockerenvironment) dependency `docker` [PyPI ↗](https://pypi.org/project/docker){:target="_blank"} You can also install dependencies for multiple models and use cases, for example: diff --git a/mkdocs.yml 
b/mkdocs.yml index a100a42cd9..1aad2360e9 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -46,6 +46,8 @@ nav: - builtin-tools.md - common-tools.md - third-party-tools.md + - Execution Environments: + - environments.md - Advanced Features: - input.md - thinking.md @@ -171,6 +173,7 @@ nav: - api/result.md - api/retries.md - api/run.md + - api/environments.md - api/settings.md - api/tools.md - api/toolsets.md diff --git a/pydantic_ai_slim/pydantic_ai/environments/__init__.py b/pydantic_ai_slim/pydantic_ai/environments/__init__.py new file mode 100644 index 0000000000..20743ce18a --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/environments/__init__.py @@ -0,0 +1,27 @@ +"""Execution environment abstractions for agents. + +This package provides: + +- `ExecutionEnvironment` — abstract base class for execution environments +- `ExecutionProcess` — interactive process handle with bidirectional I/O +- `ExecutionEnvironmentToolset` — toolset exposing coding-agent-style tools backed by an environment +- `ExecutionResult` — result type + +Implementations: + +- `environments.docker.DockerEnvironment` — Docker container-based sandbox (isolated) +- `environments.local.LocalEnvironment` — local subprocess environment (no isolation, for dev/testing) +- `environments.memory.MemoryEnvironment` — in-memory environment for testing +""" + +from pydantic_ai.toolsets.execution_environment import ExecutionEnvironmentToolset + +from ._base import EnvToolName, ExecutionEnvironment, ExecutionProcess, ExecutionResult + +__all__ = ( + 'EnvToolName', + 'ExecutionResult', + 'ExecutionEnvironment', + 'ExecutionEnvironmentToolset', + 'ExecutionProcess', +) diff --git a/pydantic_ai_slim/pydantic_ai/environments/_base.py b/pydantic_ai_slim/pydantic_ai/environments/_base.py new file mode 100644 index 0000000000..b5c8cf8948 --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/environments/_base.py @@ -0,0 +1,331 @@ +"""Base abstractions for execution environments. 
+ +This module defines the core types, the `ExecutionEnvironment` ABC, and the +`ExecutionProcess` ABC for interactive execution with bidirectional streaming I/O. +""" + +from __future__ import annotations + +from abc import ABC, abstractmethod +from dataclasses import dataclass +from typing import Any, Literal + +from typing_extensions import Self + +# --- Type aliases --- + +EnvToolName = Literal[ + 'shell', + 'read_file', + 'write_file', + 'edit_file', +] +"""Tool name for an environment capability. + +Used in `capabilities` to declare which methods an environment implements, +and by `ExecutionEnvironmentToolset` for `include`/`exclude` filtering. +""" + + +# --- Data types --- + + +@dataclass +class ExecutionResult: + """Result of a completed command execution.""" + + output: str + """The combined stdout/stderr output of the command.""" + + exit_code: int + """The exit code of the command.""" + + +class ExecutionProcess(ABC): + """Handle to a running process with bidirectional streaming I/O. + + Used for interactive execution where a script outputs data, + waits for input, processes it, and outputs more data. + """ + + @abstractmethod + async def send(self, data: bytes) -> None: + """Write data to the process's stdin. + + Args: + data: The bytes to write to stdin. + """ + + @abstractmethod + async def recv(self, timeout: float | None = None) -> bytes: + """Read available output from stdout. + + Blocks until data is available, the process exits, or the timeout expires. + + Args: + timeout: Maximum seconds to wait for data. None means wait indefinitely. + + Raises: + TimeoutError: If the timeout expires with no data available. + """ + + @abstractmethod + async def recv_stderr(self, timeout: float | None = None) -> bytes: + """Read available output from stderr. + + Args: + timeout: Maximum seconds to wait for data. None means wait indefinitely. + + Raises: + TimeoutError: If the timeout expires with no data available. 
+ """ + + @property + @abstractmethod + def returncode(self) -> int | None: + """Return code if the process has exited, None if still running.""" + + @abstractmethod + async def wait(self, timeout: float | None = None) -> int: + """Wait for the process to exit. + + Args: + timeout: Maximum seconds to wait. None means wait indefinitely. + + Returns: + The process exit code. + + Raises: + TimeoutError: If the timeout expires before the process exits. + """ + + @abstractmethod + async def kill(self) -> None: + """Kill the process.""" + + async def __aenter__(self) -> Self: + return self + + async def __aexit__(self, *args: Any) -> None: + if self.returncode is None: + await self.kill() + + +# --- Constants --- + +IMAGE_EXTENSIONS = frozenset( + { + '.png', + '.jpg', + '.jpeg', + '.gif', + '.webp', + '.bmp', + '.svg', + } +) + +IMAGE_MEDIA_TYPES: dict[str, str] = { + '.png': 'image/png', + '.jpg': 'image/jpeg', + '.jpeg': 'image/jpeg', + '.gif': 'image/gif', + '.webp': 'image/webp', + '.bmp': 'image/bmp', + '.svg': 'image/svg+xml', +} + + +# --- ExecutionEnvironment --- + + +class ExecutionEnvironment(ABC): + """Abstract base class for execution environments. + + An execution environment provides a place where agents can execute + commands, read/write files, and search the filesystem. + + Implementations range from in-memory (for testing) to local subprocess, + Docker containers, and cloud-hosted VMs. + + The only abstract member is `capabilities`; all tool methods raise + `NotImplementedError` by default. Concrete subclasses override the + methods that match their declared capabilities. + """ + + # --- Capability introspection --- + + @property + @abstractmethod + def capabilities(self) -> frozenset[EnvToolName]: + """Capabilities this environment supports (high-level). + + Used by toolsets to decide which tools to register. Only methods + corresponding to declared capabilities need to be implemented. + """ + ... 
+ + # --- Tool methods --- + # All raise NotImplementedError by default. Concrete subclasses override + # the methods that match their declared capabilities. + + async def shell( + self, + command: str, + *, + timeout: float | None = 120, + env: dict[str, str] | None = None, + ) -> ExecutionResult: + """Execute a shell command and return the result. + + Args: + command: The shell command to execute. + timeout: Maximum seconds to wait for completion. + Pass `None` to disable the timeout. + env: Additional environment variables for this command. + Merged with (and overrides) any baseline environment variables. + + Returns: + An `ExecutionResult` with the command output and exit code. + """ + raise NotImplementedError(f'{type(self).__name__} does not support shell.') + + async def read_file( + self, + path: str, + *, + offset: int = 0, + limit: int = 2000, + ) -> str | bytes: + """Read a file from the environment. + + For text files, returns a string with `cat -n` style line numbers. + For binary files (images), returns raw bytes. + + Args: + path: The file path within the environment. + offset: The line number to start reading from (0-indexed). + Ignored for binary files. + limit: Maximum number of lines to read. + Ignored for binary files. + + Returns: + Text content with line numbers (`str`), or raw bytes for binary files. + """ + raise NotImplementedError(f'{type(self).__name__} does not support read_file.') + + async def write_file(self, path: str, content: str | bytes) -> None: + """Create or overwrite a file in the environment. + + Args: + path: The file path within the environment. + content: The file content (text or binary). + """ + raise NotImplementedError(f'{type(self).__name__} does not support write_file.') + + async def replace_str( + self, + path: str, + old: str, + new: str, + *, + replace_all: bool = False, + ) -> int: + """Edit a file by exact string replacement. + + Args: + path: The file path within the environment. + old: The exact text to find. 
+ new: The replacement text. + replace_all: If True, replace all occurrences. If False, the + old string must appear exactly once or an error is raised. + + Returns: + The number of replacements made. + + Raises: + FileNotFoundError: If the file does not exist. + ValueError: If `old` is not found, or appears multiple times + when `replace_all` is False. + """ + raise NotImplementedError(f'{type(self).__name__} does not support replace_str.') + + # --- Internal helpers (not tools) --- + + async def create_process( + self, + command: str, + *, + env: dict[str, str] | None = None, + ) -> ExecutionProcess: + r"""Create an interactive process with streaming stdin/stdout. + + Args: + command: The shell command to run. + env: Additional environment variables for this process. + + Returns: + An `ExecutionProcess` handle for bidirectional I/O. + """ + raise NotImplementedError(f'{type(self).__name__} does not support interactive processes.') + + # --- Lifecycle --- + + async def __aenter__(self) -> Self: + """Start the environment (e.g., create a Docker container).""" + return self + + async def __aexit__(self, *args: Any) -> None: + """Stop the environment and clean up resources.""" + + +# --- Helper functions --- + + +def format_lines(text: str, offset: int, limit: int) -> str: + """Format text with line numbers and continuation hints. + + Shared helper used by `LocalEnvironment` and `MemoryEnvironment` + to produce consistent `cat -n` style output. 
+ """ + lines = text.splitlines(keepends=True) + total_lines = len(lines) + + if offset >= total_lines and total_lines > 0: + raise ValueError(f'Offset {offset} exceeds file length ({total_lines} lines).') + + selected = lines[offset : offset + limit] + + numbered = [f'{i:>6}\t{line}' for i, line in enumerate(selected, start=offset + 1)] + result = ''.join(numbered) + if not result.endswith('\n'): + result += '\n' + + remaining = total_lines - (offset + len(selected)) + if remaining > 0: + next_offset = offset + len(selected) + result += f'... ({remaining} more lines. Use offset={next_offset} to continue reading.)\n' + + return result + + +def apply_edit(text: str, old_string: str, new_string: str, path: str, *, replace_all: bool) -> tuple[str, int]: + """Apply a string replacement edit, returning the new text and the number of replacements. + + Raises: + ValueError: If old_string is not found, or appears multiple times + when replace_all is False. + """ + count = text.count(old_string) + + if count == 0: + raise ValueError(f'old_string not found in {path}.') + if not replace_all and count > 1: + raise ValueError(f'old_string found {count} times in {path}. Use replace_all=True or provide more context.') + + if replace_all: + new_text = text.replace(old_string, new_string) + else: + new_text = text.replace(old_string, new_string, 1) + + return new_text, count if replace_all else 1 diff --git a/pydantic_ai_slim/pydantic_ai/environments/docker.py b/pydantic_ai_slim/pydantic_ai/environments/docker.py new file mode 100644 index 0000000000..79e892f03a --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/environments/docker.py @@ -0,0 +1,595 @@ +"""Docker container-based environment for isolated code execution. 
+ +Requires the `docker` package: `pip install pydantic-ai-slim[docker-environment]` +""" + +from __future__ import annotations + +import io +import math +import posixpath +import struct +import tarfile +from pathlib import PurePosixPath +from typing import Any, cast + +import anyio +import anyio.to_thread +from typing_extensions import Self + +from ._base import ( + IMAGE_EXTENSIONS, + EnvToolName, + ExecutionEnvironment, + ExecutionProcess, + ExecutionResult, + apply_edit, +) + +try: + import docker + from docker.errors import DockerException, NotFound + from docker.models.containers import Container +except ImportError as _import_error: + raise ImportError( + 'The `docker` package is required for DockerEnvironment. ' + 'Install it with: pip install pydantic-ai-slim[docker-environment]' + ) from _import_error + + +def _shell_escape(s: str) -> str: + """Escape a string for safe use in shell commands.""" + return "'" + s.replace("'", "'\\''") + "'" + + +def _build_read_file_cmd(path: str, *, offset: int = 0, limit: int = 2000) -> str: + """Build a shell command that reads a file with line numbers.""" + escaped = _shell_escape(path) + start = offset + 1 + end = offset + limit + return ( + f'awk \'NR>={start} && NR<={end} {{printf "%6d\\t%s\\n", NR, $0}}' + f' END {{' + f'if(NR>{end}) printf "... (%d more lines. 
Use offset={end} to continue reading.)\\n", NR-{end};' + f' else if(NR>0 && NR<{start}) printf "__OFFSET_ERROR__:%d\\n", NR' + f"}}'" + f' {escaped}' + ) + + +def _put_file(container: Container, path: str, data: bytes) -> None: + """Write file data into a container via put_archive.""" + parent = str(PurePosixPath(path).parent) + filename = PurePosixPath(path).name + f = io.BytesIO() + with tarfile.open(fileobj=f, mode='w') as tar: + info = tarfile.TarInfo(name=filename) + info.size = len(data) + tar.addfile(info, io.BytesIO(data)) + f.seek(0) + # Unfortunately no types on docker put_archive + container.put_archive(parent, f) # pyright: ignore[reportUnknownMemberType] + + +class _DockerEnvironmentProcess(ExecutionProcess): + """Interactive process inside a Docker container using exec with socket I/O. + + Docker's exec socket uses a multiplexed stream protocol where stdout and + stderr frames are interleaved with 8-byte headers indicating the stream + type. This class properly separates the two streams so that `recv()` + returns only stdout data and `recv_stderr()` returns only stderr data. + When one stream is requested but the other arrives first, the unexpected + frame is buffered for the next call to the appropriate method. 
+ """ + + _STDOUT = 1 + _STDERR = 2 + + def __init__(self, container: Container, command: str, work_dir: str, env: dict[str, str] | None = None) -> None: + self._container = container + self._command = command + self._work_dir = work_dir + self._env = env + self._exec_id: str | None = None + self._socket: Any = None + self._returncode: int | None = None + self._stdout_buffer: list[bytes] = [] + self._stderr_buffer: list[bytes] = [] + self._eof = False + + async def _start(self) -> None: + """Start the exec and open the socket (called from __aenter__).""" + + def _do_start() -> tuple[str, Any]: + client: Any = self._container.client + kwargs: dict[str, Any] = { + 'stdin': True, + 'stdout': True, + 'stderr': True, + 'workdir': self._work_dir, + } + if self._env: + kwargs['environment'] = self._env + exec_id: str = client.api.exec_create( + self._container.id, + ['sh', '-c', self._command], + **kwargs, + )['Id'] + sock = client.api.exec_start(exec_id, socket=True) + # docker-py returns a SocketIO wrapper; get the raw socket + raw = getattr(sock, '_sock', sock) + return exec_id, raw + + self._exec_id, self._socket = await anyio.to_thread.run_sync(_do_start) + + async def __aenter__(self) -> Self: + if self._exec_id is None: # pragma: no branch + await self._start() + return self + + async def send(self, data: bytes) -> None: + await anyio.to_thread.run_sync(self._socket.sendall, data) + + async def recv(self, timeout: float | None = None) -> bytes: + if self._stdout_buffer: + return self._stdout_buffer.pop(0) + if timeout is not None: + with anyio.fail_after(timeout): + return await self._recv_stream(self._STDOUT) + return await self._recv_stream(self._STDOUT) + + async def recv_stderr(self, timeout: float | None = None) -> bytes: + if self._stderr_buffer: + return self._stderr_buffer.pop(0) + if timeout is not None: + with anyio.fail_after(timeout): + return await self._recv_stream(self._STDERR) + return await self._recv_stream(self._STDERR) + + async def 
_recv_stream(self, wanted: int) -> bytes: + """Read frames until one for the wanted stream type arrives.""" + while True: + stream_type, data = await anyio.to_thread.run_sync(self._read_frame) + if not data and self._eof: + return b'' + if stream_type == wanted: + return data + # Buffer the frame for the other stream + if stream_type == self._STDOUT: + self._stdout_buffer.append(data) + else: + self._stderr_buffer.append(data) + + def _read_frame(self) -> tuple[int, bytes]: + """Read one frame from the Docker multiplexed stream. + + Docker exec socket uses a multiplexed protocol: + - 8 byte header: [stream_type(1), 0, 0, 0, size(4)] + - followed by `size` bytes of data + + Returns: + A `(stream_type, data)` tuple. `stream_type` is 1 for stdout + and 2 for stderr. Returns `(0, b'')` on EOF. + """ + if self._eof: + return 0, b'' + + header = b'' + while len(header) < 8: + chunk = self._socket.recv(8 - len(header)) + if not chunk: + self._eof = True + return 0, b'' + header += chunk + + stream_type = header[0] + size = struct.unpack('>I', header[4:8])[0] + if size == 0: + return stream_type, b'' + + data = b'' + while len(data) < size: + chunk = self._socket.recv(size - len(data)) + if not chunk: + self._eof = True + break + data += chunk + return stream_type, data + + @property + def returncode(self) -> int | None: + return self._returncode + + def _inspect_exit_code(self) -> int | None: + """Synchronously inspect the Docker exec state and cache the exit code.""" + if self._returncode is not None: + return self._returncode + if self._exec_id is None: + return None + try: + client: Any = self._container.client + info = client.api.exec_inspect(self._exec_id) + rc = info.get('ExitCode') + if not info.get('Running', False) and rc is not None: + self._returncode = rc + return rc + except (DockerException, OSError): + # Docker API may raise various errors (connection, not found, etc.) 
+ # when inspecting exec state — treat as "still running" + pass + return None + + async def _poll_exit_code(self) -> int | None: + """Check the Docker exec state without blocking the event loop.""" + return await anyio.to_thread.run_sync(self._inspect_exit_code) + + async def wait(self, timeout: float | None = None) -> int: + async def _poll() -> int: + while True: + rc = await self._poll_exit_code() + if rc is not None: + return rc + await anyio.sleep(0.1) + + if timeout is not None: + with anyio.fail_after(timeout): + return await _poll() + return await _poll() + + async def kill(self) -> None: + # Docker exec doesn't provide a direct kill; close the socket + try: + self._socket.close() + except OSError: + pass + + async def __aexit__(self, *args: Any) -> None: + await self._poll_exit_code() + if self._returncode is None: + await self.kill() + + +class DockerEnvironment(ExecutionEnvironment): + """Docker container-based environment for isolated code execution. + + Provides isolated code execution with configurable resource limits, + network access, and persistent or ephemeral workspaces. + + Usage: + ```python {test="skip" lint="skip"} + async with DockerEnvironment(image='python:3.12-slim') as env: + result = await env.shell('python -c "print(42)"') + print(result.output) + ``` + """ + + def __init__( + self, + *, + image: str = 'python:3.12-slim', + env_vars: dict[str, str] | None = None, + work_dir: str = '/workspace', + volumes: dict[str, dict[str, str]] | None = None, + memory_limit: str | None = None, + cpu_limit: float | None = None, + pids_limit: int | None = None, + network_disabled: bool = False, + read_only: bool = False, + cap_drop: list[str] | None = None, + security_opt: list[str] | None = None, + user: str | None = None, + tmpfs: dict[str, str] | None = None, + init: bool = False, + ) -> None: + """Create a Docker environment. + + Args: + image: Docker image to use. Pre-build custom images with any + required packages before passing them here. 
+ env_vars: Baseline environment variables to set in the container. + work_dir: Working directory inside the container. + volumes: Volume mounts (Docker format). + memory_limit: Memory limit (e.g. '512m', '1g'). + cpu_limit: CPU limit (e.g. 1.0 for one CPU). + pids_limit: Maximum number of PIDs in the container (e.g. 256). + Prevents fork bombs. + network_disabled: Whether to disable network access. + read_only: Whether to mount the root filesystem as read-only. + Use with `tmpfs` to provide writable scratch space. + cap_drop: Linux capabilities to drop (e.g. `['ALL']`). + security_opt: Security options (e.g. `['no-new-privileges']`). + user: User to run as inside the container (e.g. `'nobody'`). + tmpfs: tmpfs mounts as `{path: options}` + (e.g. `{'/tmp': 'noexec,nosuid,size=64m'}`). + init: Whether to use `--init` to run an init process as PID 1. + Ensures proper signal handling and zombie reaping. + """ + self._image = image + self._env_vars = env_vars or {} + self._work_dir = work_dir + self._volumes = volumes + self._memory_limit = memory_limit + self._cpu_limit = cpu_limit + self._pids_limit = pids_limit + self._network_disabled = network_disabled + self._read_only = read_only + self._cap_drop = cap_drop + self._security_opt = security_opt + self._user = user + self._tmpfs = tmpfs + self._init = init + + self._client: docker.DockerClient | None = None + self._container: Container | None = None + + @classmethod + def hardened( + cls, + *, + image: str = 'python:3.12-slim', + env_vars: dict[str, str] | None = None, + work_dir: str = '/workspace', + memory_limit: str = '512m', + cpu_limit: float = 1.0, + pids_limit: int = 256, + ) -> DockerEnvironment: + """Create a hardened Docker environment with security best practices. + + This is a convenience constructor that sets sensible security defaults: + network disabled, read-only root filesystem, all capabilities dropped, + no privilege escalation, runs as `nobody`, and uses an init process. 
+ + The root filesystem is read-only; writable tmpfs mounts are provided at + `/tmp` and the working directory. + + Args: + image: Docker image to use. + env_vars: Baseline environment variables to set in the container. + work_dir: Working directory inside the container. + memory_limit: Memory limit (e.g. '512m', '1g'). + cpu_limit: CPU limit (e.g. 1.0 for one CPU). + pids_limit: Maximum number of PIDs in the container. + """ + return cls( + image=image, + env_vars=env_vars, + work_dir=work_dir, + network_disabled=True, + read_only=True, + cap_drop=['ALL'], + security_opt=['no-new-privileges'], + user='nobody', + pids_limit=pids_limit, + tmpfs={'/tmp': 'noexec,nosuid,size=64m', work_dir: 'size=128m'}, + init=True, + memory_limit=memory_limit, + cpu_limit=cpu_limit, + ) + + @property + def capabilities(self) -> frozenset[EnvToolName]: # pragma: lax no cover + return frozenset( + { + 'shell', + 'read_file', + 'write_file', + 'edit_file', + } + ) + + async def __aenter__(self) -> Self: # pragma: lax no cover + await anyio.to_thread.run_sync(self._setup) + return self + + def _setup(self) -> None: + """Start container (sync, runs in executor).""" + if self._container is not None: + return + self._client = docker.from_env() + + # Create and start container + kwargs: dict[str, Any] = { + 'image': self._image, + 'command': 'sleep infinity', + 'detach': True, + 'working_dir': self._work_dir, + 'environment': self._env_vars, + 'auto_remove': False, + } + if self._volumes: + kwargs['volumes'] = self._volumes + if self._memory_limit: + kwargs['mem_limit'] = self._memory_limit + if self._cpu_limit: + kwargs['nano_cpus'] = int(self._cpu_limit * 1e9) + if self._pids_limit is not None: + kwargs['pids_limit'] = self._pids_limit + if self._network_disabled: + kwargs['network_disabled'] = True + if self._read_only: + kwargs['read_only'] = True + if self._cap_drop: + kwargs['cap_drop'] = self._cap_drop + if self._security_opt: + kwargs['security_opt'] = self._security_opt + if 
self._user: + kwargs['user'] = self._user + if self._tmpfs: + kwargs['tmpfs'] = self._tmpfs + if self._init: + kwargs['init'] = True + + self._container = cast(Container, self._client.containers.run(**kwargs)) + + # Ensure work_dir exists + self._container.exec_run(['mkdir', '-p', self._work_dir]) + + async def __aexit__(self, *_args: Any) -> None: # pragma: lax no cover + if self._container is not None: # pragma: no branch + await anyio.to_thread.run_sync(self._teardown) + + def _teardown(self) -> None: + """Stop and remove container (sync, runs in executor).""" + if self._container is not None: # pragma: no branch + try: + self._container.stop(timeout=5) + except (DockerException, OSError): + # Best-effort cleanup: container may already be stopped or removed + pass + try: + self._container.remove(force=True) + except (DockerException, OSError): + # Best-effort cleanup: container may already be removed + pass + self._container = None + if self._client is not None: + try: + self._client.close() + except (DockerException, OSError): + pass + self._client = None + + @property + def _required_container(self) -> Container: + if self._container is None: + raise RuntimeError('DockerEnvironment not started. Use `async with DockerEnvironment(...) as env:`') + return self._container + + def _resolve_path(self, path: str) -> str: + """Resolve a path relative to work_dir for Docker API calls. + + Docker API methods like `put_archive` and `get_archive` resolve + paths against the container root `/`, not the working directory. + This helper ensures relative paths are resolved against `work_dir`. 
+ """ + if not path.startswith('/'): + return f'{self._work_dir}/{path}' + return path + + async def create_process( + self, + command: str, + *, + env: dict[str, str] | None = None, + ) -> ExecutionProcess: + return _DockerEnvironmentProcess(self._required_container, command, self._work_dir, env=env) + + async def shell( + self, + command: str, + *, + timeout: float | None = 120, + env: dict[str, str] | None = None, + ) -> ExecutionResult: + """Execute a command in the container.""" + if timeout is not None and timeout <= 0: + raise ValueError(f'timeout must be positive or None, got {timeout}') + + def _exec() -> tuple[int, bytes]: + if timeout is not None: + # Note: GNU coreutils `timeout 0` means "no timeout" (wait forever), + # so we validate timeout > 0 above to prevent surprising behavior. + wrapped = f'timeout {math.ceil(timeout)} sh -c {_shell_escape(command)}' + else: + wrapped = command + exec_kwargs: dict[str, Any] = {'workdir': self._work_dir} + if env: + exec_kwargs['environment'] = env + exit_code, output = self._required_container.exec_run( + ['sh', '-c', wrapped], + **exec_kwargs, + ) + return exit_code, output + + exit_code, output_bytes = await anyio.to_thread.run_sync(_exec) + output = output_bytes.decode('utf-8', errors='replace') + # timeout command returns 124 on timeout + if exit_code == 124 and timeout is not None: + output += '\n[Command timed out]' + return ExecutionResult(output=output, exit_code=exit_code) + + async def read_file(self, path: str, *, offset: int = 0, limit: int = 2000) -> str | bytes: + ext = posixpath.splitext(path)[1].lower() + if ext in IMAGE_EXTENSIONS: + return await anyio.to_thread.run_sync(self._read_file_bytes_sync, path) + + def _read() -> str | bytes: + cmd = _build_read_file_cmd(path, offset=offset, limit=limit) + exit_code, output = self._required_container.exec_run(['sh', '-c', cmd], workdir=self._work_dir) + if exit_code != 0: + raise FileNotFoundError(f'File not found or not readable: {path}') + try: + text 
= output.decode('utf-8') + except UnicodeDecodeError: + return self._read_file_bytes_sync(path) + if text.startswith('__OFFSET_ERROR__:'): + total_lines = int(text.split(':')[1].strip()) + raise ValueError(f'Offset {offset} exceeds file length ({total_lines} lines).') + return text + + return await anyio.to_thread.run_sync(_read) + + def _read_file_bytes_sync(self, path: str) -> bytes: + """Read raw file bytes using Docker's get_archive API.""" + try: + bits, _ = self._required_container.get_archive(self._resolve_path(path)) + except NotFound: + raise FileNotFoundError(f'File not found: {path}') + # get_archive returns a tar stream + tar_bytes = b''.join(bits) + with tarfile.open(fileobj=io.BytesIO(tar_bytes)) as tar: + members = tar.getmembers() + if not members: # pragma: no cover + raise FileNotFoundError(f'File not found: {path}') + extracted = tar.extractfile(members[0]) + if extracted is None: # pragma: no cover + raise FileNotFoundError(f'Cannot read file: {path}') + return extracted.read() + + async def write_file(self, path: str, content: str | bytes) -> None: + def _write() -> None: + full_path = self._resolve_path(path) + # Ensure parent directory exists + parent = str(PurePosixPath(full_path).parent) + self._required_container.exec_run(['mkdir', '-p', parent]) + + data = content.encode('utf-8') if isinstance(content, str) else content + _put_file(self._required_container, full_path, data) + + await anyio.to_thread.run_sync(_write) + + async def replace_str( + self, + path: str, + old: str, + new: str, + *, + replace_all: bool = False, + ) -> int: + def _edit() -> int: + raw = self._read_file_bytes_sync(path) + text = raw.decode('utf-8') + new_text, count = apply_edit(text, old, new, path, replace_all=replace_all) + _put_file(self._required_container, self._resolve_path(path), new_text.encode('utf-8')) + return count + + return await anyio.to_thread.run_sync(_edit) + + async def is_alive(self) -> bool: + """Check if the container is running. 
+ + Returns: + True if the container is running, False otherwise. + """ + if self._container is None: + return False + + def _check() -> bool: + assert self._container is not None + try: + self._container.reload() + return self._container.status == 'running' + except (DockerException, OSError): + return False + + return await anyio.to_thread.run_sync(_check) diff --git a/pydantic_ai_slim/pydantic_ai/environments/local.py b/pydantic_ai_slim/pydantic_ai/environments/local.py new file mode 100644 index 0000000000..5ba2569686 --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/environments/local.py @@ -0,0 +1,276 @@ +"""Local subprocess-based execution environment for development and testing. + +Runs commands directly on the host machine within a specified root directory. +**No isolation** — use `DockerEnvironment` for untrusted code. +""" + +from __future__ import annotations + +import subprocess +from pathlib import Path +from typing import Any + +import anyio +import anyio.abc +import anyio.to_thread +from typing_extensions import Self + +from ._base import ( + IMAGE_EXTENSIONS, + EnvToolName, + ExecutionEnvironment, + ExecutionProcess, + ExecutionResult, + apply_edit, + format_lines, +) + + +class _LocalEnvironmentProcess(ExecutionProcess): + """Interactive process backed by `anyio.abc.Process`.""" + + def __init__(self, proc: anyio.abc.Process) -> None: + self._proc = proc + + async def send(self, data: bytes) -> None: + stdin = self._proc.stdin + if stdin is None: + raise RuntimeError('Process stdin is not available.') + await stdin.send(data) + + async def recv(self, timeout: float | None = None) -> bytes: + stdout = self._proc.stdout + if stdout is None: + raise RuntimeError('Process stdout is not available.') + try: + if timeout is not None: + with anyio.fail_after(timeout): + return await stdout.receive(8192) + return await stdout.receive(8192) + except anyio.EndOfStream: + return b'' + + async def recv_stderr(self, timeout: float | None = None) -> bytes: + 
stderr = self._proc.stderr + if stderr is None: + raise RuntimeError('Process stderr is not available.') + try: + if timeout is not None: + with anyio.fail_after(timeout): + return await stderr.receive(8192) + return await stderr.receive(8192) + except anyio.EndOfStream: + return b'' + + @property + def returncode(self) -> int | None: + return self._proc.returncode + + async def wait(self, timeout: float | None = None) -> int: + if timeout is not None: + with anyio.fail_after(timeout): + return await self._proc.wait() + return await self._proc.wait() + + async def kill(self) -> None: + try: + self._proc.kill() + except ProcessLookupError: + pass + await self._proc.aclose() + _close_subprocess_transport(self._proc) + + +def _close_subprocess_transport(proc: anyio.abc.Process) -> None: + """Close the underlying asyncio subprocess transport to prevent ResourceWarning on Python 3.10. + + On Python 3.10, asyncio subprocess transports are not closed by + `Process.wait()` or `Process.aclose()` and their `__del__` + emits `ResourceWarning: unclosed transport`. Python 3.11+ fixed + this, but we still support 3.10. + """ + inner = getattr(proc, '_process', None) # anyio wraps asyncio.subprocess.Process + transport = getattr(inner, '_transport', None) + if transport is not None: # pragma: no branch + transport.close() + + +class LocalEnvironment(ExecutionEnvironment): + """Local subprocess-based execution environment for development and testing. + + Runs commands directly on the host machine within a specified root + directory. Provides no isolation — use `DockerEnvironment` for untrusted code. + + Usage: + ```python {test="skip" lint="skip"} + async with LocalEnvironment(root_dir='/tmp/workspace') as env: + result = await env.shell('python script.py') + print(result.output) + ``` + """ + + def __init__( + self, + root_dir: str | Path = '.', + *, + env_vars: dict[str, str] | None = None, + inherit_env: bool = True, + ) -> None: + """Create a local execution environment. 
+ + Args: + root_dir: The working directory for all operations. + Defaults to the current directory. + env_vars: Baseline environment variables for all commands. + inherit_env: Whether to inherit the host's environment variables. + When True (default), `env_vars` and per-call `env` are merged + on top of `os.environ`. When False, only `env_vars` and per-call + `env` are used (useful for reproducibility and testing). + """ + self._root_dir = Path(root_dir).resolve() + self._env_vars = env_vars or {} + self._inherit_env = inherit_env + + @property + def capabilities(self) -> frozenset[EnvToolName]: + return frozenset({'shell', 'read_file', 'write_file', 'edit_file'}) + + async def __aenter__(self) -> Self: + await anyio.to_thread.run_sync(lambda: self._root_dir.mkdir(parents=True, exist_ok=True)) + return self + + async def __aexit__(self, *_args: Any) -> None: + pass + + def _resolve_path(self, path: str) -> Path: + """Resolve a path relative to root_dir, preventing traversal.""" + resolved = (self._root_dir / path).resolve() + if not resolved.is_relative_to(self._root_dir): + raise PermissionError(f'Path {path!r} resolves outside the environment root.') + return resolved + + def _build_env(self, env: dict[str, str] | None) -> dict[str, str] | None: + """Merge baseline env vars with per-call overrides.""" + if not self._env_vars and not env and self._inherit_env: + return None # subprocess inherits naturally + import os + + merged = {**os.environ} if self._inherit_env else {} + merged.update(self._env_vars) + if env: + merged.update(env) + return merged + + async def create_process( + self, + command: str, + *, + env: dict[str, str] | None = None, + ) -> ExecutionProcess: + proc = await anyio.open_process( + command, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=self._root_dir, + env=self._build_env(env), + ) + return _LocalEnvironmentProcess(proc) + + async def shell( + self, + command: str, + *, + timeout: float | None = 120, + 
env: dict[str, str] | None = None, + ) -> ExecutionResult: + """Execute a command using subprocess for simplicity and reliability.""" + if timeout is not None and timeout <= 0: + raise ValueError(f'timeout must be positive or None, got {timeout}') + proc = await anyio.open_process( + command, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + cwd=self._root_dir, + env=self._build_env(env), + ) + try: + assert proc.stdout is not None + chunks: list[bytes] = [] + if timeout is not None: + with anyio.fail_after(timeout): + async for chunk in proc.stdout: + chunks.append(chunk) + await proc.wait() + else: + async for chunk in proc.stdout: + chunks.append(chunk) + await proc.wait() + except TimeoutError: + proc.kill() + with anyio.CancelScope(shield=True): + await proc.wait() + _close_subprocess_transport(proc) + return ExecutionResult(output='[Command timed out]', exit_code=-1) + + _close_subprocess_transport(proc) + stdout = b''.join(chunks) + output = stdout.decode('utf-8', errors='replace') + return ExecutionResult( + output=output, + exit_code=proc.returncode if proc.returncode is not None else 0, + ) + + async def read_file(self, path: str, *, offset: int = 0, limit: int = 2000) -> str | bytes: + resolved = self._resolve_path(path) + + def _read() -> str | bytes: + if not resolved.is_file(): + if resolved.is_dir(): + raise FileNotFoundError(f"'{path}' is a directory, not a file.") + raise FileNotFoundError(f'File not found: {path}') + + if resolved.suffix.lower() in IMAGE_EXTENSIONS: + return resolved.read_bytes() + + raw = resolved.read_bytes() + try: + text = raw.decode('utf-8') + except UnicodeDecodeError: + return raw + return format_lines(text, offset, limit) + + return await anyio.to_thread.run_sync(_read) + + async def write_file(self, path: str, content: str | bytes) -> None: + resolved = self._resolve_path(path) + + def _write() -> None: + resolved.parent.mkdir(parents=True, exist_ok=True) + if isinstance(content, bytes): + 
resolved.write_bytes(content) + else: + resolved.write_text(content, encoding='utf-8') + + await anyio.to_thread.run_sync(_write) + + async def replace_str( + self, + path: str, + old: str, + new: str, + *, + replace_all: bool = False, + ) -> int: + resolved = self._resolve_path(path) + + def _edit() -> int: + if not resolved.is_file(): + raise FileNotFoundError(f'File not found: {path}') + + text = resolved.read_text(encoding='utf-8') + new_text, count = apply_edit(text, old, new, path, replace_all=replace_all) + resolved.write_text(new_text, encoding='utf-8') + return count + + return await anyio.to_thread.run_sync(_edit) diff --git a/pydantic_ai_slim/pydantic_ai/environments/memory.py b/pydantic_ai_slim/pydantic_ai/environments/memory.py new file mode 100644 index 0000000000..78134c0179 --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/environments/memory.py @@ -0,0 +1,172 @@ +"""In-memory execution environment for testing. + +All file operations use an in-memory dictionary. Shell commands are handled +by an optional callback — if not provided, `shell()` raises `RuntimeError`. +""" + +from __future__ import annotations + +import posixpath +from collections.abc import Callable, Mapping +from typing import TYPE_CHECKING + +from ._base import ( + IMAGE_EXTENSIONS, + ExecutionEnvironment, + ExecutionResult, + apply_edit, + format_lines, +) + +if TYPE_CHECKING: + from ._base import EnvToolName + + +class MemoryEnvironment(ExecutionEnvironment): + """In-memory execution environment for testing. + + File operations use an in-memory dictionary, making tests fast and + isolated with no filesystem access. Shell commands can optionally be + handled by a user-provided callback. + + This is the testing counterpart to `LocalEnvironment`, analogous to + how `TestModel` and `FunctionModel` relate to real model classes. 
+ + Usage: + ```python {test="skip" lint="skip"} + from pydantic_ai.environments.memory import MemoryEnvironment + + env = MemoryEnvironment(files={'main.py': 'print("hello")'}) + async with env: + content = await env.read_file('main.py') + assert 'hello' in content + ``` + """ + + def __init__( + self, + files: dict[str, str | bytes] | None = None, + *, + command_handler: Callable[[str], ExecutionResult] | None = None, + ) -> None: + """Create an in-memory execution environment. + + Args: + files: Initial files to populate the environment with. + Keys are file paths, values are file contents (str or bytes). + command_handler: Optional callback for `shell()` calls. + Receives the command string and returns an `ExecutionResult`. + If not provided, `shell()` raises `RuntimeError`. + """ + self._files: dict[str, str | bytes] = {} + if files: + for path, content in files.items(): + self._files[self._normalize(path)] = content + self._command_handler = command_handler + + @property + def capabilities(self) -> frozenset[EnvToolName]: + caps: set[EnvToolName] = {'read_file', 'write_file', 'edit_file'} + if self._command_handler is not None: + caps.add('shell') + return frozenset(caps) + + @property + def files(self) -> Mapping[str, str | bytes]: + """Read-only view of the in-memory file system. + + Keys are normalized file paths, values are file contents. + Useful for test assertions against raw file content without the + line-number formatting that [`read_file()`][pydantic_ai.environments.memory.MemoryEnvironment.read_file] adds. 
+ """ + return self._files + + @staticmethod + def _normalize(path: str) -> str: + """Normalize a path for consistent storage.""" + normalized = posixpath.normpath(path) + # Strip leading './' or '/' + if normalized.startswith('./'): # pragma: no cover + normalized = normalized[2:] + elif normalized.startswith('/'): + normalized = normalized[1:] + return normalized + + async def shell( + self, + command: str, + *, + timeout: float | None = 120, + env: dict[str, str] | None = None, + ) -> ExecutionResult: + """Execute a command using the configured handler. + + Args: + command: The shell command to execute. + timeout: Ignored for MemoryEnvironment. + env: Ignored for MemoryEnvironment. + + Returns: + The result from the command handler. + + Raises: + RuntimeError: If no command_handler was provided. + """ + if self._command_handler is None: + raise RuntimeError( + 'MemoryEnvironment has no command_handler configured. ' + 'Pass command_handler= to the constructor to handle shell() calls.' + ) + return self._command_handler(command) + + async def read_file(self, path: str, *, offset: int = 0, limit: int = 2000) -> str | bytes: + normalized = self._normalize(path) + + # Check if path is a "directory" (any file starts with path/) + if any(k.startswith(normalized + '/') for k in self._files): + if normalized not in self._files: + raise FileNotFoundError(f"'{path}' is a directory, not a file.") + + if normalized not in self._files: + raise FileNotFoundError(f'File not found: {path}') + + content = self._files[normalized] + + # Return raw bytes for image files + ext = posixpath.splitext(normalized)[1].lower() + if ext in IMAGE_EXTENSIONS: + if isinstance(content, bytes): + return content + return content.encode('utf-8') + + # Text mode + if isinstance(content, bytes): + try: + text = content.decode('utf-8') + except UnicodeDecodeError: + return content + else: + text = content + + return format_lines(text, offset, limit) + + async def write_file(self, path: str, content: 
str | bytes) -> None: + self._files[self._normalize(path)] = content + + async def replace_str( + self, + path: str, + old: str, + new: str, + *, + replace_all: bool = False, + ) -> int: + normalized = self._normalize(path) + if normalized not in self._files: + raise FileNotFoundError(f'File not found: {path}') + + content = self._files[normalized] + text = content.decode('utf-8') if isinstance(content, bytes) else content + new_text, count = apply_edit(text, old, new, path, replace_all=replace_all) + self._files[normalized] = new_text + return count diff --git a/pydantic_ai_slim/pydantic_ai/toolsets/execution_environment.py b/pydantic_ai_slim/pydantic_ai/toolsets/execution_environment.py new file mode 100644 index 0000000000..788fcedd41 --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/toolsets/execution_environment.py @@ -0,0 +1,364 @@ +"""ExecutionEnvironmentToolset — exposes coding-agent-style tools backed by an ExecutionEnvironment.""" + +from __future__ import annotations + +import inspect +import posixpath +from collections.abc import Callable, Iterator, Sequence +from contextlib import AsyncExitStack, contextmanager +from contextvars import ContextVar, Token +from dataclasses import replace +from typing import TYPE_CHECKING, Any + +import anyio +from typing_extensions import Self + +from ..environments._base import ( + IMAGE_EXTENSIONS, + IMAGE_MEDIA_TYPES, + EnvToolName, + ExecutionEnvironment, +) +from ..exceptions import ModelRetry +from ..messages import BinaryContent +from ..toolsets.function import FunctionToolset + +if TYPE_CHECKING: + from .._run_context import AgentDepsT, RunContext + from ..toolsets.abstract import ToolsetTool + + +_TOOL_TO_ENV_METHOD: dict[str, str] = { + 'edit_file': 'replace_str', +} +"""Map tool names to environment method names where they differ.""" + + +class ExecutionEnvironmentToolset(FunctionToolset[Any]): + """Toolset providing coding-agent-style tools backed by an `ExecutionEnvironment`. 
+ + Tool names and schemas are designed to match what popular coding agents + expose, so models are well-trained on them. + + Tools are dynamically registered based on the environment's `capabilities`, + filtered by `include`/`exclude`. + + The environment can be: + - Passed directly at construction time via `shared_environment` (shared across concurrent runs) + - Created per-run via `environment_factory` (isolated concurrent runs) + - Set/overridden via context var using `use_environment()` (for testing or per-call-site config) + + Usage: + ```python {test="skip" lint="skip"} + from pydantic_ai import Agent + from pydantic_ai.environments import ExecutionEnvironmentToolset + from pydantic_ai.environments.docker import DockerEnvironment + + env = DockerEnvironment(image='python:3.12-slim') + toolset = ExecutionEnvironmentToolset(env) + + agent = Agent('openai:gpt-5.2', toolsets=[toolset]) + + async with env: + result = await agent.run('Write a script that prints hello') + ``` + """ + + def __init__( + self, + shared_environment: ExecutionEnvironment | None = None, + *, + environment_factory: Callable[[], ExecutionEnvironment] | None = None, + include: Sequence[EnvToolName] | None = None, + exclude: Sequence[EnvToolName] | None = None, + require_shell_approval: bool = False, + require_write_approval: bool = False, + image_support: bool = True, + max_image_bytes: int = 50 * 1024 * 1024, + max_retries: int = 1, + id: str | None = None, + ): + """Create a new execution environment toolset. + + Args: + shared_environment: A shared execution environment for tool execution. + All concurrent runs share this single environment instance. + Can also be set later via `use_environment()`. + environment_factory: A callable that creates a fresh environment per + `async with toolset:` entry. Use this for concurrent runs that need + isolation (e.g. separate Docker containers). Mutually exclusive with + `shared_environment`. + include: Tool names to include. 
`None` means all tools supported + by the environment. Pass an explicit sequence to restrict to + specific tools. + exclude: Tool names to exclude. `None` defaults to no exclusions. + Pass an explicit sequence to exclude specific tools. + require_shell_approval: Whether the `shell` tool requires human-in-the-loop + approval before execution. Recommended for `LocalEnvironment` where + commands run directly on the host. + require_write_approval: Whether `write_file` and edit tools require + human-in-the-loop approval before execution. + image_support: Whether `read_file` should return images as `BinaryContent` + for multimodal models (otherwise returns a placeholder message). + max_image_bytes: Maximum image file size to return as BinaryContent. + max_retries: Maximum retries per tool call. + id: Optional unique ID for the toolset (required for durable execution). + """ + if shared_environment is not None and environment_factory is not None: + raise ValueError('Cannot provide both shared_environment and environment_factory.') + + super().__init__(max_retries=max_retries, id=id) + self._shared_environment = shared_environment + self._environment_factory = environment_factory + self._environment_override: ContextVar[ExecutionEnvironment | None] = ContextVar( + f'_environment_override_{id or "environment"}', default=None + ) + self._per_run_state: ContextVar[tuple[AsyncExitStack, Token[ExecutionEnvironment | None]] | None] = ContextVar( + f'_per_run_state_{id or "environment"}', default=None + ) + self._include: frozenset[EnvToolName] | None = frozenset(include) if include is not None else None + self._exclude: frozenset[EnvToolName] = frozenset(exclude) if exclude else frozenset() + self._image_support = image_support + self._max_image_bytes = max_image_bytes + self._require_shell_approval = require_shell_approval + self._require_write_approval = require_write_approval + self._enter_lock = anyio.Lock() + self._running_count: int = 0 + self._exit_stack: AsyncExitStack | 
None = None + + # Register all tools unconditionally so schemas are built eagerly. + # get_tools() filters at runtime based on the current environment's capabilities. + self._register_tools() + + def _resolve_tool_names(self, env: ExecutionEnvironment) -> frozenset[str]: + """Determine which tool names to expose, based on the environment's capabilities and include/exclude.""" + tool_names: set[str] = set(env.capabilities) + + if self._include is not None: + tool_names &= self._include + tool_names -= self._exclude + + return frozenset(tool_names) + + def _register_tools(self) -> None: + """Register all tools unconditionally. + + Filtering based on the environment's capabilities and include/exclude + is deferred to ``get_tools()``, which runs at request time when the + active environment is known. + """ + self._register_shell() + self._register_read_file() + self._register_write_file() + self._register_edit_file() + + def _register_shell(self) -> None: + async def shell(command: str, timeout: int = 120) -> str: + """Execute a shell command and return its output. + + Use this for running scripts, installing packages, and other terminal operations. + + Args: + command: The shell command to execute. + timeout: Maximum seconds to wait for the command to complete. + """ + result = await self.required_environment.shell(command, timeout=timeout) + parts: list[str] = [] + if result.output: + parts.append(result.output) + parts.append(f'Exit code: {result.exit_code}') + return '\n'.join(parts) + + self.tool(requires_approval=self._require_shell_approval)(shell) + + def _register_read_file(self) -> None: + async def read_file(path: str, offset: int = 0, limit: int = 2000) -> Any: + """Read a file from the filesystem. + + Returns text files with line numbers, or renders image files for visual inspection. + Use offset and limit to read specific sections of large files. + + Args: + path: The file path to read. + offset: The line number to start reading from (0-indexed). 
+ limit: Maximum number of lines to read. + """ + try: + content = await self.required_environment.read_file(path, offset=offset, limit=limit) + if isinstance(content, bytes): + ext = posixpath.splitext(path)[1].lower() + if ext in IMAGE_EXTENSIONS: + # Image file — return as BinaryContent or placeholder + if self._image_support: + if len(content) > self._max_image_bytes: + return ( + f'Error: Image too large ({len(content)} bytes, max {self._max_image_bytes} bytes).' + ) + media_type = IMAGE_MEDIA_TYPES.get(ext, 'application/octet-stream') + return BinaryContent(data=content, media_type=media_type) + else: + return f'[Image file: {path} — image_support is disabled on this toolset]' + else: + return f'[Binary file: {path} — cannot display as text]' + return content + except (FileNotFoundError, PermissionError, ValueError, OSError) as e: + return f'Error: {e}' + + self.tool(read_file) + + def _register_write_file(self) -> None: + async def write_file(path: str, content: str) -> str: + """Create or overwrite a file. + + The file and any parent directories will be created if they do not exist. + + Args: + path: The file path to write. + content: The content to write to the file. + """ + try: + await self.required_environment.write_file(path, content) + return f'File written: {path}' + except (PermissionError, OSError) as e: + return f'Error: {e}' + + self.tool(requires_approval=self._require_write_approval)(write_file) + + def _register_edit_file(self) -> None: + async def edit_file(path: str, old: str, new: str, replace_all: bool = False) -> str: + """Edit a file by exact string replacement. + + The old string must match exactly (including whitespace and indentation). + For uniqueness, include surrounding context lines. + Only use this after reading the file first. + + Args: + path: The file path to edit. + old: The exact text to find and replace. + new: The replacement text. + replace_all: Replace all occurrences. Defaults to false (old must be unique). 
+ """ + try: + count = await self.required_environment.replace_str(path, old, new, replace_all=replace_all) + return f'Replaced {count} occurrence{"s" if count != 1 else ""} in {path}.' + except (FileNotFoundError, PermissionError, ValueError, OSError) as e: + raise ModelRetry(str(e)) + + self.tool(requires_approval=self._require_write_approval)(edit_file) + + async def get_tools(self, ctx: RunContext[AgentDepsT]) -> dict[str, ToolsetTool[AgentDepsT]]: + all_tools = await super().get_tools(ctx) + env = self.required_environment + tool_names = self._resolve_tool_names(env) + filtered = {name: tool for name, tool in all_tools.items() if name in tool_names} + + # Override tool descriptions from environment method docstrings. + # Each environment subclass can document its tool methods with LLM-facing + # docstrings (e.g. explaining regex flavor for grep); if present, these + # replace the generic default description. + env_type = type(env) + for tool_name, tool in filtered.items(): + method_name = _TOOL_TO_ENV_METHOD.get(tool_name, tool_name) + env_method = getattr(env_type, method_name, None) + base_method = getattr(ExecutionEnvironment, method_name, None) + if env_method is not None and env_method is not base_method and env_method.__doc__: + desc = inspect.cleandoc(env_method.__doc__) + filtered[tool_name] = replace(tool, tool_def=replace(tool.tool_def, description=desc)) + + return filtered + + @property + def tool_name_conflict_hint(self) -> str: + return 'Wrap the ExecutionEnvironmentToolset in a PrefixedToolset to avoid name conflicts.' + + @property + def environment(self) -> ExecutionEnvironment | None: + """The active execution environment, or None if not configured. + + Checks the context var override first (which includes per-run factory + environments), then falls back to the shared environment. 
+ """ + override = self._environment_override.get() + if override is not None: + return override + return self._shared_environment + + @property + def required_environment(self) -> ExecutionEnvironment: + """The active execution environment, raising if not configured. + + Raises: + RuntimeError: If no environment is available. + """ + env = self.environment + if env is not None: + return env + raise RuntimeError( + 'No execution environment configured. Pass one to ExecutionEnvironmentToolset() or use .use_environment().' + ) + + @contextmanager + def use_environment(self, environment: ExecutionEnvironment) -> Iterator[None]: + """Override the execution environment for the current context. + + Useful for testing or using different environments at different call sites. + + Usage: + ```python {test="skip" lint="skip"} + with toolset.use_environment(test_env): + result = await agent.run('test prompt', toolsets=[toolset]) + ``` + + Args: + environment: The execution environment to use within this context. + """ + token = self._environment_override.set(environment) + try: + yield + finally: + self._environment_override.reset(token) + + # --- Lifecycle --- + + async def __aenter__(self) -> Self: + if self._environment_factory is not None: + env = self._environment_factory() + stack = AsyncExitStack() + await stack.enter_async_context(env) + token = self._environment_override.set(env) + self._per_run_state.set((stack, token)) + else: + async with self._enter_lock: + self._running_count += 1 + if self._running_count == 1: + # Use _shared_environment directly (not required_environment) to avoid + # entering a use_environment() override into the shared exit stack. + env = self._shared_environment + if env is None: + self._running_count -= 1 + raise RuntimeError( + 'No execution environment configured. Pass one to ExecutionEnvironmentToolset() or use environment_factory.' 
+ ) + self._exit_stack = AsyncExitStack() + try: + await self._exit_stack.enter_async_context(env) + except Exception: + self._running_count -= 1 + raise + return self + + async def __aexit__(self, *args: Any) -> bool | None: + if self._environment_factory is not None: + state = self._per_run_state.get() + if state is not None: # pragma: no branch + stack, token = state + await stack.aclose() + self._environment_override.reset(token) + self._per_run_state.set(None) + else: + async with self._enter_lock: + self._running_count -= 1 + if self._running_count == 0 and self._exit_stack is not None: + await self._exit_stack.aclose() + self._exit_stack = None + return None diff --git a/pydantic_ai_slim/pyproject.toml b/pydantic_ai_slim/pyproject.toml index 3483ecd22e..44cc42ce57 100644 --- a/pydantic_ai_slim/pyproject.toml +++ b/pydantic_ai_slim/pyproject.toml @@ -129,6 +129,8 @@ temporal = ["temporalio==1.20.0"] dbos = ["dbos>=2.10.0"] # Prefect prefect = ["prefect>=3.4.21"] +# Execution environments +docker-environment = ["docker>=7.0"] [tool.hatch.metadata] allow-direct-references = true @@ -139,5 +141,13 @@ pai = "pydantic_ai._cli:cli_exit" # TODO remove this when clai has been out for [tool.hatch.build.targets.wheel] packages = ["pydantic_ai"] +[tool.uv] +conflicts = [ + [ + { extra = "huggingface" }, + { extra = "outlines-vllm-offline" }, + ], +] + [tool.uv.sources] pydantic-graph = { workspace = true } diff --git a/tests/graph/test_persistence.py b/tests/graph/test_persistence.py index 4bb54947e4..8779fd1e2d 100644 --- a/tests/graph/test_persistence.py +++ b/tests/graph/test_persistence.py @@ -394,7 +394,7 @@ async def run(self, ctx: GraphRunContext[CountDownState]) -> CountDown | End[int state = CountDownState(counter=3) count_down_graph = Graph(nodes=[CountDown]) - await count_down_graph.initialize(CountDown(), state=state, persistence=persistence) + await count_down_graph.initialize(CountDown(), state=state, persistence=persistence, infer_name=False) 
last_snapshot = persistence.last_snapshot async with count_down_graph.iter_from_persistence(persistence) as run: diff --git a/tests/test_environments.py b/tests/test_environments.py new file mode 100644 index 0000000000..aad91adae6 --- /dev/null +++ b/tests/test_environments.py @@ -0,0 +1,2456 @@ +"""Tests for pydantic_ai.environments — ExecutionEnvironment, ExecutionEnvironmentToolset, LocalEnvironment, and MemoryEnvironment.""" + +from __future__ import annotations + +import io +import os +import struct +import tarfile +from pathlib import Path +from typing import Any +from unittest.mock import MagicMock, patch as mock_patch + +import pytest +from inline_snapshot import snapshot + +from pydantic_ai import Agent, BinaryContent, ToolCallPart +from pydantic_ai._run_context import RunContext +from pydantic_ai._tool_manager import ToolManager +from pydantic_ai.environments import ( + EnvToolName, + ExecutionEnvironment as BaseEnv, + ExecutionEnvironmentToolset, + ExecutionResult, +) +from pydantic_ai.environments._base import ( + apply_edit, + format_lines, +) +from pydantic_ai.environments.local import LocalEnvironment, _LocalEnvironmentProcess +from pydantic_ai.environments.memory import MemoryEnvironment +from pydantic_ai.exceptions import UnexpectedModelBehavior +from pydantic_ai.models.test import TestModel +from pydantic_ai.usage import RunUsage + +try: + from docker.errors import DockerException, NotFound as DockerNotFound + + from pydantic_ai.environments.docker import ( + DockerEnvironment, + _build_read_file_cmd, + _DockerEnvironmentProcess, + _put_file, + _shell_escape, + ) +except ImportError: # pragma: lax no cover + docker_installed = False +else: + docker_installed = True + +pytestmark = pytest.mark.anyio + + +def build_run_context(deps: Any = None, run_step: int = 0) -> RunContext[Any]: + return RunContext( + deps=deps, + model=TestModel(), + usage=RunUsage(), + prompt=None, + messages=[], + run_step=run_step, + ) + + +# --- Data types --- + + +def 
test_execute_result(): + result = ExecutionResult(output='hello\n', exit_code=0) + assert result.output == 'hello\n' + assert result.exit_code == 0 + + +# --- LocalEnvironment: execute --- + + +async def test_local_execute_basic(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + result = await env.shell('echo hello') + assert result.exit_code == 0 + assert 'hello' in result.output + + +async def test_local_execute_exit_code(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + result = await env.shell('exit 42') + assert result.exit_code == 42 + + +async def test_local_execute_timeout(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + result = await env.shell('sleep 10', timeout=0.5) + assert result.exit_code == -1 + assert 'timed out' in result.output.lower() + + +async def test_local_execute_stderr(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + result = await env.shell('echo error >&2') + assert 'error' in result.output + + +# --- LocalEnvironment: environment variables --- + + +async def test_local_env_vars_baseline(tmp_path: Path): + async with LocalEnvironment(tmp_path, env_vars={'MY_VAR': 'baseline'}) as env: + result = await env.shell('echo $MY_VAR') + assert 'baseline' in result.output + + +async def test_local_env_vars_per_call(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + result = await env.shell('echo $CALL_VAR', env={'CALL_VAR': 'per_call'}) + assert 'per_call' in result.output + + +async def test_local_env_vars_merged(tmp_path: Path): + async with LocalEnvironment(tmp_path, env_vars={'BASE': 'one'}) as env: + result = await env.shell('echo $BASE $EXTRA', env={'EXTRA': 'two'}) + assert 'one' in result.output + assert 'two' in result.output + + +async def test_local_env_vars_per_call_overrides_baseline(tmp_path: Path): + async with LocalEnvironment(tmp_path, env_vars={'VAR': 'old'}) as env: + result = await env.shell('echo $VAR', env={'VAR': 'new'}) + assert 'new' in 
result.output + assert 'old' not in result.output + + +async def test_local_inherit_env_true(tmp_path: Path): + os.environ['_TEST_INHERIT_CHECK'] = 'inherited' + try: + async with LocalEnvironment(tmp_path, inherit_env=True) as env: + result = await env.shell('echo $_TEST_INHERIT_CHECK') + assert 'inherited' in result.output + finally: + del os.environ['_TEST_INHERIT_CHECK'] + + +async def test_local_inherit_env_false(tmp_path: Path): + os.environ['_TEST_INHERIT_CHECK'] = 'should_not_see' + try: + async with LocalEnvironment(tmp_path, inherit_env=False) as env: + result = await env.shell('echo x${_TEST_INHERIT_CHECK}x') + assert result.output.strip() == 'xx' + finally: + del os.environ['_TEST_INHERIT_CHECK'] + + +async def test_local_inherit_env_false_with_explicit_vars(tmp_path: Path): + async with LocalEnvironment(tmp_path, env_vars={'ONLY_THIS': 'yes'}, inherit_env=False) as env: + result = await env.shell('/bin/echo $ONLY_THIS') + assert 'yes' in result.output + + +# --- LocalEnvironment: file operations --- + + +async def test_local_write_and_read(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + await env.write_file('test.txt', 'line one\nline two\n') + content = await env.read_file('test.txt') + assert isinstance(content, str) + assert 'line one' in content + assert 'line two' in content + + +async def test_local_read_line_numbers(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + await env.write_file('numbered.txt', 'alpha\nbeta\ngamma\n') + content = await env.read_file('numbered.txt') + assert content == snapshot("""\ + 1\talpha + 2\tbeta + 3\tgamma +""") + + +async def test_local_read_with_offset_limit(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + lines = '\n'.join(f'line {i}' for i in range(20)) + await env.write_file('long.txt', lines) + + content = await env.read_file('long.txt', offset=5, limit=3) + assert content == snapshot("""\ + 6\tline 5 + 7\tline 6 + 8\tline 7 +... (12 more lines. 
Use offset=8 to continue reading.) +""") + + +async def test_local_read_continuation_hint(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + lines = '\n'.join(f'line {i}' for i in range(20)) + await env.write_file('long.txt', lines) + + content = await env.read_file('long.txt', offset=0, limit=5) + assert content == snapshot("""\ + 1\tline 0 + 2\tline 1 + 3\tline 2 + 4\tline 3 + 5\tline 4 +... (15 more lines. Use offset=5 to continue reading.) +""") + + +async def test_local_read_offset_out_of_bounds(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + await env.write_file('short.txt', 'one\ntwo\n') + with pytest.raises(ValueError, match='Offset 100 exceeds file length'): + await env.read_file('short.txt', offset=100) + + +async def test_local_read_directory_error(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + (tmp_path / 'subdir').mkdir() + with pytest.raises(FileNotFoundError, match='is a directory'): + await env.read_file('subdir') + + +async def test_local_read_nonexistent(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + with pytest.raises(FileNotFoundError): + await env.read_file('nonexistent.txt') + + +async def test_local_write_creates_parent_dirs(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + await env.write_file('deep/nested/dir/file.txt', 'content') + content = await env.read_file('deep/nested/dir/file.txt') + assert isinstance(content, str) + assert 'content' in content + + +async def test_local_write_binary(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + await env.write_file('binary.bin', b'\x00\x01\x02\x03') + assert (tmp_path / 'binary.bin').read_bytes() == b'\x00\x01\x02\x03' + + +async def test_local_read_file_bytes(tmp_path: Path): + # Create a minimal PNG (1x1 transparent pixel) + png_data = ( + b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x01' + b'\x00\x00\x00\x01\x08\x06\x00\x00\x00\x1f\x15\xc4\x89' + 
b'\x00\x00\x00\nIDATx\x9cc\x00\x01\x00\x00\x05\x00\x01' + b'\r\n\xb4\x00\x00\x00\x00IEND\xaeB`\x82' + ) + async with LocalEnvironment(tmp_path) as env: + await env.write_file('image.png', png_data) + result = await env.read_file('image.png') + assert isinstance(result, bytes) + assert result == png_data + + +# --- LocalEnvironment: edit_file --- + + +async def test_local_edit_single_replacement(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + await env.write_file('edit.txt', 'foo bar baz') + count = await env.replace_str('edit.txt', 'bar', 'BAR') + assert count == 1 + content = (tmp_path / 'edit.txt').read_text() + assert content == 'foo BAR baz' + + +async def test_local_edit_replace_all(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + await env.write_file('edit.txt', 'aaa bbb aaa') + count = await env.replace_str('edit.txt', 'aaa', 'xxx', replace_all=True) + assert count == 2 + content = (tmp_path / 'edit.txt').read_text() + assert content == 'xxx bbb xxx' + + +async def test_local_edit_not_found(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + await env.write_file('edit.txt', 'hello world') + with pytest.raises(ValueError, match='not found'): + await env.replace_str('edit.txt', 'missing', 'replacement') + + +async def test_local_edit_ambiguous_without_replace_all(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + await env.write_file('edit.txt', 'dup dup dup') + with pytest.raises(ValueError, match='3 times'): + await env.replace_str('edit.txt', 'dup', 'unique') + + +async def test_local_edit_nonexistent_file(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + with pytest.raises(FileNotFoundError): + await env.replace_str('missing.txt', 'old', 'new') + + +async def test_local_edit_multiline(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + await env.write_file('code.py', 'def foo():\n return "old"\n\nprint("test")\n') + count = await env.replace_str('code.py', 
'def foo():\n return "old"', 'def foo():\n return "new"') + assert count == 1 + content = (tmp_path / 'code.py').read_text() + assert 'return "new"' in content + assert 'return "old"' not in content + assert 'print("test")' in content + + +# --- LocalEnvironment: create_process --- + + +async def test_local_create_process(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + proc = await env.create_process('echo interactive') + async with proc: + data = await proc.recv(timeout=5) + assert b'interactive' in data + + +async def test_local_create_process_env(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + proc = await env.create_process('echo $PROC_VAR', env={'PROC_VAR': 'from_process'}) + async with proc: + data = await proc.recv(timeout=5) + assert b'from_process' in data + + +async def test_local_create_process_stdin(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + # Use head -1 so the process exits after reading one line + proc = await env.create_process('head -1') + async with proc: + await proc.send(b'hello from stdin\n') + data = await proc.recv(timeout=5) + assert b'hello from stdin' in data + + +async def test_local_process_wait(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + proc = await env.create_process('exit 7') + async with proc: + rc = await proc.wait(timeout=5) + assert rc == 7 + + +async def test_local_process_kill(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + proc = await env.create_process('sleep 60') + # Don't use async with — we want to test manual kill + await proc.kill() + assert proc.returncode is not None + + +# --- LocalEnvironment: path traversal --- + + +async def test_local_path_traversal_blocked(tmp_path: Path): + async with LocalEnvironment(tmp_path) as env: + with pytest.raises(PermissionError, match='outside the environment root'): + await env.read_file('../../../etc/passwd') + + +async def test_local_path_traversal_write_blocked(tmp_path: Path): 
+ async with LocalEnvironment(tmp_path) as env: + with pytest.raises(PermissionError, match='outside the environment root'): + await env.write_file('../escape.txt', 'malicious') + + +# --- LocalEnvironment: creates root dir --- + + +async def test_local_creates_root_dir(tmp_path: Path): + root = tmp_path / 'new_root' + assert not root.exists() + async with LocalEnvironment(root) as env: + assert root.exists() + result = await env.shell('echo works') + assert 'works' in result.output + + +# --- ExecutionEnvironmentToolset --- + + +async def test_toolset_tool_names(): + toolset = ExecutionEnvironmentToolset(LocalEnvironment('.')) + ctx = build_run_context() + tools = await toolset.get_tools(ctx) + tool_names = sorted(tools.keys()) + assert tool_names == snapshot(['edit_file', 'read_file', 'shell', 'write_file']) + + +async def test_toolset_include_flags(): + toolset = ExecutionEnvironmentToolset( + LocalEnvironment('.'), + include=[], + ) + ctx = build_run_context() + tools = await toolset.get_tools(ctx) + assert tools == {} + + +async def test_toolset_include_shell_only(): + toolset = ExecutionEnvironmentToolset( + LocalEnvironment('.'), + include=['shell'], + ) + ctx = build_run_context() + tools = await toolset.get_tools(ctx) + assert sorted(tools.keys()) == ['shell'] + + +async def test_toolset_bash_tool(tmp_path: Path): + env = LocalEnvironment(tmp_path) + toolset = ExecutionEnvironmentToolset(env) + ctx = build_run_context(None) + manager = await ToolManager[None](toolset).for_run_step(ctx) + + async with env: + result = await manager.handle_call(ToolCallPart(tool_name='shell', args={'command': 'echo hello'})) + assert result == snapshot("""\ +hello + +Exit code: 0\ +""") + + +async def test_toolset_read_write_tools(tmp_path: Path): + env = LocalEnvironment(tmp_path) + toolset = ExecutionEnvironmentToolset(env) + ctx = build_run_context(None) + manager = await ToolManager[None](toolset).for_run_step(ctx) + + async with env: + # Write + write_result = await 
manager.handle_call( + ToolCallPart(tool_name='write_file', args={'path': 'test.txt', 'content': 'hello world'}) + ) + assert write_result == snapshot('File written: test.txt') + + # Read + read_result = await manager.handle_call(ToolCallPart(tool_name='read_file', args={'path': 'test.txt'})) + assert read_result == snapshot(' 1\thello world\n') + + +async def test_toolset_edit_retry_on_error(tmp_path: Path): + env = LocalEnvironment(tmp_path) + toolset = ExecutionEnvironmentToolset(env, max_retries=0) + ctx = build_run_context(None) + manager = await ToolManager[None](toolset).for_run_step(ctx) + + async with env: + await env.write_file('test.txt', 'content') + + # Edit with non-matching string: ModelRetry is raised by tool, but with max_retries=0 + # the ToolManager wraps it into UnexpectedModelBehavior + with pytest.raises(UnexpectedModelBehavior, match='exceeded max retries count of 0'): + await manager.handle_call( + ToolCallPart( + tool_name='edit_file', + args={'path': 'test.txt', 'old': 'nonexistent', 'new': 'replacement'}, + ) + ) + + +async def test_toolset_edit_retry_on_permission_error(tmp_path: Path): + """edit_file raises ModelRetry on PermissionError (e.g. 
path traversal).""" + env = LocalEnvironment(tmp_path) + toolset = ExecutionEnvironmentToolset(env, max_retries=0) + ctx = build_run_context(None) + manager = await ToolManager[None](toolset).for_run_step(ctx) + + async with env: + with pytest.raises(UnexpectedModelBehavior, match='exceeded max retries count of 0'): + await manager.handle_call( + ToolCallPart( + tool_name='edit_file', + args={'path': '../../etc/passwd', 'old': 'root', 'new': 'hacked'}, + ) + ) + + +# --- ExecutionEnvironmentToolset: error handling --- + + +async def test_toolset_read_nonexistent_returns_error(tmp_path: Path): + """read_file on a nonexistent file returns an error string instead of crashing.""" + env = LocalEnvironment(tmp_path) + toolset = ExecutionEnvironmentToolset(env) + ctx = build_run_context(None) + manager = await ToolManager[None](toolset).for_run_step(ctx) + + async with env: + result = await manager.handle_call(ToolCallPart(tool_name='read_file', args={'path': 'nope.txt'})) + assert 'Error:' in str(result) + + +async def test_toolset_read_path_traversal_returns_error(tmp_path: Path): + """read_file with path traversal returns an error string.""" + env = LocalEnvironment(tmp_path) + toolset = ExecutionEnvironmentToolset(env) + ctx = build_run_context(None) + manager = await ToolManager[None](toolset).for_run_step(ctx) + + async with env: + result = await manager.handle_call(ToolCallPart(tool_name='read_file', args={'path': '../../etc/passwd'})) + assert 'Error:' in str(result) + + +async def test_toolset_write_path_traversal_returns_error(tmp_path: Path): + """write_file with path traversal returns an error string.""" + env = LocalEnvironment(tmp_path) + toolset = ExecutionEnvironmentToolset(env) + ctx = build_run_context(None) + manager = await ToolManager[None](toolset).for_run_step(ctx) + + async with env: + result = await manager.handle_call( + ToolCallPart(tool_name='write_file', args={'path': '../../tmp/evil.txt', 'content': 'bad'}) + ) + assert 'Error:' in 
str(result) + + +async def test_toolset_read_offset_out_of_bounds_returns_error(tmp_path: Path): + """read_file with offset past EOF returns an error string.""" + env = LocalEnvironment(tmp_path) + toolset = ExecutionEnvironmentToolset(env) + ctx = build_run_context(None) + manager = await ToolManager[None](toolset).for_run_step(ctx) + + async with env: + await env.write_file('short.txt', 'one\ntwo\n') + + result = await manager.handle_call( + ToolCallPart(tool_name='read_file', args={'path': 'short.txt', 'offset': 100}) + ) + assert 'Error:' in str(result) + assert 'Offset 100 exceeds' in str(result) + + +async def test_toolset_read_continuation_hint(tmp_path: Path): + """read_file includes continuation hint when there are more lines.""" + env = LocalEnvironment(tmp_path) + toolset = ExecutionEnvironmentToolset(env) + ctx = build_run_context(None) + manager = await ToolManager[None](toolset).for_run_step(ctx) + + async with env: + lines = '\n'.join(f'line {i}' for i in range(20)) + await env.write_file('long.txt', lines) + + result = await manager.handle_call( + ToolCallPart(tool_name='read_file', args={'path': 'long.txt', 'offset': 0, 'limit': 5}) + ) + assert result == snapshot("""\ + 1 line 0 + 2 line 1 + 3 line 2 + 4 line 3 + 5 line 4 +... (15 more lines. Use offset=5 to continue reading.) 
+""") + + +# --- ExecutionEnvironmentToolset: approval flags --- + + +async def test_toolset_require_shell_approval(): + """require_shell_approval sets requires_approval on the shell tool.""" + env = MemoryEnvironment(command_handler=lambda cmd: ExecutionResult(output='', exit_code=0)) + toolset = ExecutionEnvironmentToolset(env, require_shell_approval=True) + ctx = build_run_context(None) + tools = await toolset.get_tools(ctx) + assert tools['shell'].tool_def.kind == 'unapproved' + # Other tools should be normal + assert tools['read_file'].tool_def.kind == 'function' + + +async def test_toolset_require_write_approval(): + """require_write_approval sets requires_approval on write_file and edit_file.""" + toolset = ExecutionEnvironmentToolset(MemoryEnvironment(), require_write_approval=True) + ctx = build_run_context(None) + tools = await toolset.get_tools(ctx) + assert tools['write_file'].tool_def.kind == 'unapproved' + assert tools['edit_file'].tool_def.kind == 'unapproved' + # read_file should NOT require approval + assert tools['read_file'].tool_def.kind == 'function' + + +async def test_toolset_default_no_approval(): + """By default, no tools require approval.""" + toolset = ExecutionEnvironmentToolset(MemoryEnvironment()) + ctx = build_run_context(None) + tools = await toolset.get_tools(ctx) + for tool in tools.values(): + assert tool.tool_def.kind == 'function' + + +# --- ExecutionEnvironmentToolset: environment management --- + + +async def test_toolset_environment_property(): + env = LocalEnvironment('.') + toolset = ExecutionEnvironmentToolset(env) + assert toolset.environment is env + assert toolset.required_environment is env + + +async def test_toolset_no_environment_returns_none(): + toolset = ExecutionEnvironmentToolset() + assert toolset.environment is None + + +async def test_toolset_no_environment_required_raises(): + toolset = ExecutionEnvironmentToolset() + with pytest.raises(RuntimeError, match='No execution environment configured'): + _ = 
toolset.required_environment + + +async def test_toolset_use_environment(): + env1 = LocalEnvironment('/tmp/env1') + env2 = LocalEnvironment('/tmp/env2') + toolset = ExecutionEnvironmentToolset(env1) + + assert toolset.environment is env1 + with toolset.use_environment(env2): + assert toolset.environment is env2 + assert toolset.environment is env1 + + +async def test_toolset_use_environment_no_default(): + env = LocalEnvironment('.') + toolset = ExecutionEnvironmentToolset() + + assert toolset.environment is None + + with toolset.use_environment(env): + assert toolset.environment is env + + assert toolset.environment is None + + +async def test_toolset_tool_name_conflict_hint(): + toolset = ExecutionEnvironmentToolset(LocalEnvironment('.')) + assert 'PrefixedToolset' in toolset.tool_name_conflict_hint + + +# --- ExecutionEnvironmentToolset: lifecycle --- + + +async def test_toolset_enter_no_environment_raises(): + toolset = ExecutionEnvironmentToolset() + with pytest.raises(RuntimeError, match='No execution environment configured'): + async with toolset: + pass + + +async def test_toolset_lifecycle(tmp_path: Path): + env = LocalEnvironment(tmp_path) + toolset = ExecutionEnvironmentToolset(env) + + async with toolset: + result = await env.shell('echo lifecycle') + assert 'lifecycle' in result.output + + +# --- ExecutionEnvironmentToolset: image support --- + + +async def test_toolset_image_support_disabled(tmp_path: Path): + env = LocalEnvironment(tmp_path) + toolset = ExecutionEnvironmentToolset(env, image_support=False) + ctx = build_run_context(None) + manager = await ToolManager[None](toolset).for_run_step(ctx) + + async with env: + await env.write_file('photo.png', b'\x89PNG\r\n\x1a\n') + result = await manager.handle_call(ToolCallPart(tool_name='read_file', args={'path': 'photo.png'})) + assert result == snapshot('[Image file: photo.png — image_support is disabled on this toolset]') + + +# --- MemoryEnvironment --- + + +async def test_memory_read_write(): + 
async with MemoryEnvironment() as env: + await env.write_file('test.txt', 'hello world\n') + content = await env.read_file('test.txt') + assert content == snapshot("""\ + 1\thello world +""") + + +async def test_memory_initial_files(): + env = MemoryEnvironment(files={'a.txt': 'alpha', 'b.txt': 'beta'}) + async with env: + assert env.files == {'a.txt': 'alpha', 'b.txt': 'beta'} + a = await env.read_file('a.txt') + assert isinstance(a, str) + assert 'alpha' in a + b = await env.read_file('b.txt') + assert isinstance(b, str) + assert 'beta' in b + + +async def test_memory_read_nonexistent(): + async with MemoryEnvironment() as env: + with pytest.raises(FileNotFoundError): + await env.read_file('nope.txt') + + +async def test_memory_read_directory_error(): + env = MemoryEnvironment(files={'dir/file.txt': 'content'}) + async with env: + with pytest.raises(FileNotFoundError, match='is a directory'): + await env.read_file('dir') + + +async def test_memory_read_offset_limit(): + lines = '\n'.join(f'line {i}' for i in range(20)) + env = MemoryEnvironment(files={'long.txt': lines}) + async with env: + content = await env.read_file('long.txt', offset=5, limit=3) + assert isinstance(content, str) + assert 'line 5' in content + assert 'line 7' in content + assert 'line 4' not in content + assert 'line 8' not in content + + +async def test_memory_read_continuation_hint(): + lines = '\n'.join(f'line {i}' for i in range(20)) + env = MemoryEnvironment(files={'long.txt': lines}) + async with env: + content = await env.read_file('long.txt', offset=0, limit=5) + assert isinstance(content, str) + assert '15 more lines' in content + assert 'offset=5' in content + + +async def test_memory_read_offset_out_of_bounds(): + env = MemoryEnvironment(files={'short.txt': 'one\ntwo\n'}) + async with env: + with pytest.raises(ValueError, match='Offset 100 exceeds'): + await env.read_file('short.txt', offset=100) + + +async def test_memory_edit_file(): + env = MemoryEnvironment(files={'code.py': 
'old_value = 1'}) + async with env: + count = await env.replace_str('code.py', 'old_value', 'new_value') + assert count == 1 + content = await env.read_file('code.py') + assert isinstance(content, str) + assert 'new_value' in content + assert 'old_value' not in content + + +async def test_memory_edit_file_not_found(): + async with MemoryEnvironment() as env: + with pytest.raises(FileNotFoundError): + await env.replace_str('nope.txt', 'a', 'b') + + +async def test_memory_edit_string_not_found(): + env = MemoryEnvironment(files={'f.txt': 'hello'}) + async with env: + with pytest.raises(ValueError, match='not found'): + await env.replace_str('f.txt', 'missing', 'replacement') + + +async def test_memory_edit_ambiguous(): + env = MemoryEnvironment(files={'f.txt': 'dup dup dup'}) + async with env: + with pytest.raises(ValueError, match='3 times'): + await env.replace_str('f.txt', 'dup', 'x') + + +async def test_memory_edit_replace_all(): + env = MemoryEnvironment(files={'f.txt': 'aaa bbb aaa'}) + async with env: + count = await env.replace_str('f.txt', 'aaa', 'xxx', replace_all=True) + assert count == 2 + content = await env.read_file('f.txt') + assert isinstance(content, str) + assert 'xxx bbb xxx' in content + + +async def test_memory_execute_with_handler(): + def handler(cmd: str) -> ExecutionResult: + return ExecutionResult(output=f'ran: {cmd}\n', exit_code=0) + + async with MemoryEnvironment(command_handler=handler) as env: + result = await env.shell('echo hello') + assert result.output == 'ran: echo hello\n' + assert result.exit_code == 0 + + +async def test_memory_execute_no_handler(): + async with MemoryEnvironment() as env: + with pytest.raises(RuntimeError, match='no command_handler'): + await env.shell('echo hello') + + +async def test_memory_create_process_not_supported(): + async with MemoryEnvironment() as env: + with pytest.raises(NotImplementedError): + await env.create_process('echo hello') + + +async def test_memory_write_binary(): + async with 
MemoryEnvironment() as env: + await env.write_file('data.bin', b'\x00\x01\x02') + # Non-image binary files are returned as text (decoded) + content = await env.read_file('data.bin') + assert isinstance(content, str) + + +async def test_memory_read_file_bytes(): + png_data = b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR' + env = MemoryEnvironment(files={'img.png': png_data}) + async with env: + result = await env.read_file('img.png') + assert isinstance(result, bytes) + assert result == png_data + + +# --- MemoryEnvironment with ExecutionEnvironmentToolset --- + + +async def test_memory_toolset_integration(): + """MemoryEnvironment works with ExecutionEnvironmentToolset for full agent testing.""" + env = MemoryEnvironment(files={'main.py': 'print("hello")\n'}) + toolset = ExecutionEnvironmentToolset(env, exclude=['shell']) + ctx = build_run_context(None) + manager = await ToolManager[None](toolset).for_run_step(ctx) + + async with env: + # read_file + result = await manager.handle_call(ToolCallPart(tool_name='read_file', args={'path': 'main.py'})) + assert result == snapshot(' 1\tprint("hello")\n') + + # write_file + result = await manager.handle_call( + ToolCallPart(tool_name='write_file', args={'path': 'new.py', 'content': 'x = 1'}) + ) + assert result == snapshot('File written: new.py') + + +# --- Agent-level integration test --- + + +async def test_agent_with_execution_toolset(): + """Agent with ExecutionEnvironmentToolset runs end-to-end using TestModel and MemoryEnvironment.""" + + env = MemoryEnvironment( + files={'data.txt': 'hello world\n'}, + command_handler=lambda cmd: ExecutionResult(output=f'executed: {cmd}\n', exit_code=0), + ) + toolset = ExecutionEnvironmentToolset(env) + + agent = Agent('test', toolsets=[toolset]) + + async with env: + result = await agent.run('Read the file data.txt') + # The TestModel will call tools and we verify it completes without error + assert result.output is not None + + +# pyright: reportPrivateUsage=false, 
reportUnknownMemberType=false, reportUnknownArgumentType=false, reportUnknownVariableType=false, reportPossiblyUnboundVariable=false + + +# --- _base.py helper functions --- + + +def test_format_lines_empty_file(): + """format_lines on empty string returns just a newline.""" + result = format_lines('', 0, 2000) + assert result == '\n' + + +def test_format_lines_trailing_newline(): + """format_lines adds trailing newline when text doesn't end with one.""" + result = format_lines('no trailing newline', 0, 2000) + assert result.endswith('\n') + assert '1\tno trailing newline' in result + + +def test_apply_edit_basic(): + new_text, count = apply_edit('hello world', 'world', 'earth', 'test.txt', replace_all=False) + assert new_text == 'hello earth' + assert count == 1 + + +def test_apply_edit_replace_all(): + new_text, count = apply_edit('aaa bbb aaa', 'aaa', 'xxx', 'test.txt', replace_all=True) + assert new_text == 'xxx bbb xxx' + assert count == 2 + + +def test_apply_edit_not_found(): + with pytest.raises(ValueError, match='not found'): + apply_edit('hello', 'missing', 'x', 'test.txt', replace_all=False) + + +def test_apply_edit_ambiguous(): + with pytest.raises(ValueError, match='2 times'): + apply_edit('aa bb aa', 'aa', 'x', 'test.txt', replace_all=False) + + +# --- LocalEnvironment: additional edge cases --- + + +async def test_local_execute_no_timeout(tmp_path: Path): + """execute() with timeout=None completes without timeout.""" + async with LocalEnvironment(tmp_path) as env: + result = await env.shell('echo no_timeout', timeout=None) + assert result.exit_code == 0 + assert 'no_timeout' in result.output + + +async def test_local_execute_invalid_timeout(tmp_path: Path): + """execute() with non-positive timeout raises ValueError.""" + async with LocalEnvironment(tmp_path) as env: + with pytest.raises(ValueError, match='timeout must be positive or None'): + await env.shell('echo test', timeout=0) + with pytest.raises(ValueError, match='timeout must be positive or 
None'): + await env.shell('echo test', timeout=-1) + + +async def test_local_read_file_bytes_directory(tmp_path: Path): + """read_file_bytes on a directory raises FileNotFoundError.""" + async with LocalEnvironment(tmp_path) as env: + (tmp_path / 'adir').mkdir() + with pytest.raises(FileNotFoundError, match='is a directory'): + await env.read_file('adir') + + +async def test_local_read_file_bytes_nonexistent(tmp_path: Path): + """read_file_bytes on a nonexistent file raises FileNotFoundError.""" + async with LocalEnvironment(tmp_path) as env: + with pytest.raises(FileNotFoundError): + await env.read_file('nope.bin') + + +# --- MemoryEnvironment: additional edge cases --- + + +async def test_memory_normalize_paths(): + """MemoryEnvironment normalizes paths correctly.""" + async with MemoryEnvironment() as env: + await env.write_file('./test.txt', 'content') + content = await env.read_file('test.txt') + assert isinstance(content, str) + assert 'content' in content + + +async def test_memory_normalize_leading_slash(): + """MemoryEnvironment strips leading slashes.""" + async with MemoryEnvironment() as env: + await env.write_file('/test.txt', 'content') + content = await env.read_file('test.txt') + assert isinstance(content, str) + assert 'content' in content + + +async def test_memory_read_file_text(): + """read_file on text file returns formatted string.""" + env = MemoryEnvironment(files={'text.txt': 'hello'}) + async with env: + result = await env.read_file('text.txt') + assert isinstance(result, str) + assert 'hello' in result + + +async def test_memory_read_file_not_found(): + """read_file on missing file raises FileNotFoundError.""" + async with MemoryEnvironment() as env: + with pytest.raises(FileNotFoundError): + await env.read_file('missing.txt') + + +async def test_memory_edit_binary(): + """edit_file works on binary content.""" + env = MemoryEnvironment(files={'data.txt': b'hello world'}) + async with env: + count = await env.replace_str('data.txt', 
'world', 'earth') + assert count == 1 + + +# --- ExecutionEnvironmentToolset: additional coverage --- + + +async def test_toolset_image_too_large(tmp_path: Path): + """read_file on an image that's too large returns error string.""" + env = LocalEnvironment(tmp_path) + toolset = ExecutionEnvironmentToolset(env, max_image_bytes=10) # Very small limit + ctx = build_run_context(None) + manager = await ToolManager[None](toolset).for_run_step(ctx) + + async with env: + # Write a PNG file that exceeds the limit + await env.write_file('big.png', b'\x89PNG\r\n\x1a\n' + b'\x00' * 100) + result = await manager.handle_call(ToolCallPart(tool_name='read_file', args={'path': 'big.png'})) + assert 'Image too large' in str(result) + + +async def test_toolset_image_read(tmp_path: Path): + """read_file on an image returns BinaryContent.""" + + env = LocalEnvironment(tmp_path) + toolset = ExecutionEnvironmentToolset(env) + ctx = build_run_context(None) + manager = await ToolManager[None](toolset).for_run_step(ctx) + + async with env: + png_data = ( + b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x01' + b'\x00\x00\x00\x01\x08\x06\x00\x00\x00\x1f\x15\xc4\x89' + b'\x00\x00\x00\nIDATx\x9cc\x00\x01\x00\x00\x05\x00\x01' + b'\r\n\xb4\x00\x00\x00\x00IEND\xaeB`\x82' + ) + await env.write_file('img.png', png_data) + result = await manager.handle_call(ToolCallPart(tool_name='read_file', args={'path': 'img.png'})) + assert isinstance(result, BinaryContent) + assert result.media_type == 'image/png' + + +async def test_toolset_read_binary_non_image(): + """read_file on a non-image binary file returns a placeholder message.""" + # Store invalid UTF-8 bytes under a non-image extension so MemoryEnvironment returns raw bytes + env = MemoryEnvironment(files={'data.bin': b'\x80\x81\x82'}) + toolset = ExecutionEnvironmentToolset(env) + ctx = build_run_context(None) + manager = await ToolManager[None](toolset).for_run_step(ctx) + async with env: + result = await 
manager.handle_call(ToolCallPart(tool_name='read_file', args={'path': 'data.bin'})) + assert result == '[Binary file: data.bin — cannot display as text]' + + +async def test_toolset_edit_success(tmp_path: Path): + """edit_file tool returns success message.""" + env = LocalEnvironment(tmp_path) + toolset = ExecutionEnvironmentToolset(env) + ctx = build_run_context(None) + manager = await ToolManager[None](toolset).for_run_step(ctx) + + async with env: + await env.write_file('code.py', 'old_value = 1\n') + result = await manager.handle_call( + ToolCallPart( + tool_name='edit_file', + args={'path': 'code.py', 'old': 'old_value', 'new': 'new_value'}, + ) + ) + assert result == snapshot('Replaced 1 occurrence in code.py.') + + +async def test_toolset_lifecycle_ref_counting(tmp_path: Path): + """Multiple context manager entries share the environment.""" + env = LocalEnvironment(tmp_path) + toolset = ExecutionEnvironmentToolset(env) + + async with toolset: + async with toolset: + # Both entries active + result = await env.shell('echo shared') + assert 'shared' in result.output + # Still alive after one exit + result = await env.shell('echo still_alive') + assert 'still_alive' in result.output + + +# --- DockerEnvironment: mocked tests --- + + +def _make_tar(filename: str, data: bytes) -> bytes: + """Create a tar archive with a single file.""" + f = io.BytesIO() + with tarfile.open(fileobj=f, mode='w') as tar: + info = tarfile.TarInfo(name=filename) + info.size = len(data) + tar.addfile(info, io.BytesIO(data)) + f.seek(0) + return f.read() + + +class MockContainer: + """Mock Docker container for testing.""" + + def __init__(self) -> None: + self._files: dict[str, bytes] = {} + self.id = 'mock-container-id' + self.status = 'running' + self.client = MagicMock() + + def exec_run( # noqa: C901 + self, + cmd: list[str] | str, + workdir: str | None = None, + environment: dict[str, str] | None = None, + **kwargs: Any, + ) -> tuple[int, bytes]: + """Simulate exec_run by executing 
simple commands.""" + if isinstance(cmd, list): + cmd_str = ' '.join(cmd) + else: + cmd_str = cmd # pragma: no cover + + # Handle mkdir -p + if 'mkdir -p' in cmd_str: + return 0, b'' + + # Handle awk (read_file) + if 'awk' in cmd_str: + # Parse start/end from the awk NR range: NR>=start && NR<=end + import re as _re + + nr_match = _re.search(r'NR>=(\d+) && NR<=(\d+)', cmd_str) + start = int(nr_match.group(1)) if nr_match else 1 + end = int(nr_match.group(2)) if nr_match else 2000 + # Try to find the file by matching path in the awk command. + for fpath, data in self._files.items(): + name = fpath.rsplit('/', 1)[-1] if '/' in fpath else fpath + if name in cmd_str or fpath in cmd_str: # pragma: no branch + text = data.decode('utf-8', errors='replace') + lines = text.splitlines(keepends=True) + total = len(lines) + # Offset exceeds file length + if total > 0 and total < start: + return 0, f'__OFFSET_ERROR__:{total}\n'.encode() + selected = lines[start - 1 : end] + numbered = [f'{i:>6}\t{line}' for i, line in enumerate(selected, start=start)] + result = ''.join(numbered) + remaining = total - end + if remaining > 0: + result += f'... ({remaining} more lines. Use offset={end} to continue reading.)\n' + return 0, result.encode('utf-8') + return 1, b'File not found' + + # Handle find (glob) + if 'find' in cmd_str: + # Extract the search path from: find '' ... + import shlex as _shlex + + find_part = cmd_str.split('|')[0].strip() + # Remove the 'sh -c' wrapper if present + if find_part.startswith('sh -c '): # pragma: no branch + find_part = find_part[len('sh -c ') :] + tokens = _shlex.split(find_part) + # tokens[0] is 'find', tokens[1] is the path + search_path = tokens[1] if len(tokens) > 1 else '.' 
+ wd = workdir or '/workspace' + matches = [] + for fpath in sorted(self._files): + # Make path relative to workdir + if not fpath.startswith(wd + '/'): + continue + rel = fpath[len(wd) + 1 :] + if search_path == '.': + matches.append(f'./{rel}') + elif rel.startswith(search_path + '/') or rel == search_path: + matches.append(rel) + return 0, '\n'.join(matches).encode('utf-8') + + # Handle general commands + if 'echo' in cmd_str: + # Extract the echo argument + msg = cmd_str.split('echo ', 1)[-1] if 'echo ' in cmd_str else '' + return 0, (msg + '\n').encode('utf-8') + + if 'exit' in cmd_str: # pragma: no cover + return 1, b'' + + return 0, b'' # pragma: no cover + + def put_archive(self, path: str, data: Any) -> bool: + """Simulate file upload by extracting tar data.""" + tar_data = data.read() if hasattr(data, 'read') else data + with tarfile.open(fileobj=io.BytesIO(tar_data)) as tar: + for member in tar.getmembers(): + extracted = tar.extractfile(member) + if extracted: # pragma: no branch + full_path = f'{path}/{member.name}' if path != '.' 
else member.name + self._files[full_path] = extracted.read() + return True + + def get_archive(self, path: str) -> tuple[list[bytes], dict[str, Any]]: + """Simulate file download.""" + if path not in self._files: + # Check if file exists at resolved path + for fpath, data in self._files.items(): # pragma: no cover + if fpath.endswith(path) or path.endswith(fpath.split('/')[-1]): + return [_make_tar(fpath.split('/')[-1], data)], {} # pragma: no cover + raise DockerNotFound('File not found') # pragma: no cover + data = self._files[path] + return [_make_tar(path.split('/')[-1], data)], {} + + def stop(self, timeout: int = 5) -> None: # pragma: no cover + self.status = 'stopped' + + def remove(self, force: bool = False) -> None: + pass + + def reload(self) -> None: + pass + + +@pytest.fixture +def mock_container() -> MockContainer: + return MockContainer() + + +@pytest.fixture +def mock_docker_sandbox(mock_container: MockContainer) -> Any: + """Create a DockerEnvironment with a mock container.""" + sandbox = DockerEnvironment(image='python:3.12-slim') + sandbox._container = mock_container # type: ignore[assignment] + sandbox._client = MagicMock() + return sandbox + + +@pytest.mark.skipif(not docker_installed, reason='docker package not installed') +class TestDocker: + async def test_docker_execute(self, mock_docker_sandbox: Any) -> None: + """DockerEnvironment.execute runs commands in container.""" + result = await mock_docker_sandbox.shell('echo hello') + assert result.exit_code == 0 + assert isinstance(result.output, str) + + async def test_docker_execute_timeout(self, mock_docker_sandbox: Any) -> None: + """DockerEnvironment.execute wraps command with timeout.""" + result = await mock_docker_sandbox.shell('echo test', timeout=30) + assert result.exit_code == 0 + + async def test_docker_execute_no_timeout(self, mock_docker_sandbox: Any) -> None: + """DockerEnvironment.execute with timeout=None.""" + result = await mock_docker_sandbox.shell('echo test', timeout=None) 
+ assert result.exit_code == 0 + + async def test_docker_execute_invalid_timeout(self, mock_docker_sandbox: Any) -> None: + """DockerEnvironment.execute with non-positive timeout raises ValueError.""" + with pytest.raises(ValueError, match='timeout must be positive or None'): + await mock_docker_sandbox.shell('echo test', timeout=0) + with pytest.raises(ValueError, match='timeout must be positive or None'): + await mock_docker_sandbox.shell('echo test', timeout=-1) + + async def test_docker_execute_with_env(self, mock_docker_sandbox: Any) -> None: + """DockerEnvironment.execute passes env vars.""" + result = await mock_docker_sandbox.shell('echo test', env={'KEY': 'value'}) + assert result.exit_code == 0 + + async def test_docker_write_read_file(self, mock_docker_sandbox: Any) -> None: + """DockerEnvironment write and read files.""" + await mock_docker_sandbox.write_file('test.txt', 'hello world\n') + content = await mock_docker_sandbox.read_file('test.txt') + assert isinstance(content, str) + + async def test_docker_write_file_binary(self, mock_docker_sandbox: Any) -> None: + """DockerEnvironment write binary file.""" + await mock_docker_sandbox.write_file('data.bin', b'\x00\x01\x02') + + async def test_docker_read_file_not_found(self, mock_docker_sandbox: Any) -> None: + """DockerEnvironment.read_file on missing file raises FileNotFoundError.""" + with pytest.raises(FileNotFoundError): + await mock_docker_sandbox.read_file('nonexistent.txt') + + async def test_docker_read_file_offset_out_of_range( + self, mock_docker_sandbox: Any, mock_container: MockContainer + ) -> None: + """DockerEnvironment.read_file raises ValueError when offset exceeds file length.""" + mock_container._files['/workspace/small.txt'] = b'line1\nline2\nline3\n' + with pytest.raises(ValueError, match='Offset 10 exceeds file length \\(3 lines\\)'): + await mock_docker_sandbox.read_file('small.txt', offset=10) + + async def test_docker_read_file_with_offset(self, mock_docker_sandbox: Any, 
mock_container: MockContainer) -> None: + """DockerEnvironment.read_file respects offset and limit.""" + mock_container._files['/workspace/lines.txt'] = b'a\nb\nc\nd\ne\n' + result = await mock_docker_sandbox.read_file('lines.txt', offset=2, limit=2) + assert isinstance(result, str) + assert ' 3\tc\n' in result + assert ' 4\td\n' in result + assert '... (1 more lines. Use offset=4 to continue reading.)' in result + + async def test_docker_read_file_image(self, mock_docker_sandbox: Any, mock_container: MockContainer) -> None: + """DockerEnvironment.read_file returns raw bytes for image files.""" + png_data = b'\x89PNG\r\n\x1a\n' + mock_container._files['/workspace/image.png'] = png_data + result = await mock_docker_sandbox.read_file('image.png') + assert isinstance(result, bytes) + assert result == png_data + + async def test_docker_edit_file(self, mock_docker_sandbox: Any, mock_container: MockContainer) -> None: + """DockerEnvironment.edit_file replaces text.""" + mock_container._files['/workspace/code.py'] = b'old_value = 1' + count = await mock_docker_sandbox.replace_str('code.py', 'old_value', 'new_value') + assert count == 1 + + async def test_docker_container_property(self, mock_docker_sandbox: Any) -> None: + """DockerEnvironment._required_container raises when not started.""" + + sandbox = DockerEnvironment() + with pytest.raises(RuntimeError, match='not started'): + _ = sandbox._required_container + + async def test_docker_create_process(self, mock_docker_sandbox: Any) -> None: + """DockerEnvironment.create_process returns a _DockerEnvironmentProcess.""" + proc = await mock_docker_sandbox.create_process('echo test') + assert proc is not None + + async def test_docker_is_alive(self, mock_docker_sandbox: Any) -> None: + """DockerEnvironment.is_alive checks container status.""" + result = await mock_docker_sandbox.is_alive() + assert result is True + + async def test_docker_is_alive_not_started( + self, + ) -> None: + """DockerEnvironment.is_alive returns 
False when not started.""" + + sandbox = DockerEnvironment() + result = await sandbox.is_alive() + assert result is False + + async def test_docker_resolve_path(self, mock_docker_sandbox: Any) -> None: + """DockerEnvironment._resolve_path resolves relative paths.""" + assert mock_docker_sandbox._resolve_path('test.txt') == '/workspace/test.txt' + assert mock_docker_sandbox._resolve_path('/abs/path') == '/abs/path' + assert mock_docker_sandbox._resolve_path('sub/dir/file.py') == '/workspace/sub/dir/file.py' + + def test_docker_put_file(self) -> None: + """_put_file creates a tar archive and uploads it.""" + + container = MockContainer() + _put_file(container, '/workspace/test.txt', b'hello') # type: ignore[arg-type] + assert '/workspace/test.txt' in container._files + assert container._files['/workspace/test.txt'] == b'hello' + + def test_docker_sandbox_process_read_frame(self) -> None: + """_DockerEnvironmentProcess._read_frame parses multiplexed stream frames.""" + + container = MockContainer() + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + + # Create a mock socket with a multiplexed frame + stdout_data = b'hello from stdout' + header = struct.pack('>BxxxI', 1, len(stdout_data)) # stream_type=1 (stdout) + + mock_socket = MagicMock() + mock_socket.recv.side_effect = [header, stdout_data] + proc._socket = mock_socket + + stream_type, data = proc._read_frame() + assert stream_type == 1 + assert data == stdout_data + + def test_docker_sandbox_process_read_frame_stderr(self) -> None: + """_DockerEnvironmentProcess._read_frame handles stderr frames.""" + + container = MockContainer() + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + + stderr_data = b'error output' + header = struct.pack('>BxxxI', 2, len(stderr_data)) # stream_type=2 (stderr) + + mock_socket = MagicMock() + mock_socket.recv.side_effect = [header, stderr_data] + proc._socket = mock_socket + + 
stream_type, data = proc._read_frame() + assert stream_type == 2 + assert data == stderr_data + + def test_docker_sandbox_process_read_frame_eof(self) -> None: + """_DockerEnvironmentProcess._read_frame returns empty on EOF.""" + + container = MockContainer() + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + + mock_socket = MagicMock() + mock_socket.recv.return_value = b'' # EOF + proc._socket = mock_socket + + stream_type, data = proc._read_frame() + assert stream_type == 0 + assert data == b'' + assert proc._eof is True + + def test_docker_sandbox_process_read_frame_zero_size(self) -> None: + """_DockerEnvironmentProcess._read_frame handles zero-size frames.""" + + container = MockContainer() + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + + header = struct.pack('>BxxxI', 1, 0) # zero size + + mock_socket = MagicMock() + mock_socket.recv.return_value = header + proc._socket = mock_socket + + stream_type, data = proc._read_frame() + assert stream_type == 1 + assert data == b'' + + def test_docker_sandbox_process_already_eof(self) -> None: + """_DockerEnvironmentProcess._read_frame returns empty when already at EOF.""" + + container = MockContainer() + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + proc._eof = True + + stream_type, data = proc._read_frame() + assert stream_type == 0 + assert data == b'' + + def test_docker_hardened_constructor( + self, + ): + """DockerEnvironment.hardened() returns a properly configured instance.""" + env = DockerEnvironment.hardened(image='python:3.12-slim', memory_limit='1g') + assert env._network_disabled is True + assert env._read_only is True + assert env._cap_drop == ['ALL'] + assert env._memory_limit == '1g' + assert env._user == 'nobody' + assert env._init is True + + def test_docker_setup_early_return( + self, + ): + """DockerEnvironment._setup returns early if container 
already exists.""" + env = DockerEnvironment(image='python:3.12-slim') + env._container = MagicMock() + env._setup() # should not create a new container + assert env._client is None # docker.from_env() was never called + + async def test_docker_process_recv_stderr_no_buffer( + self, + ) -> None: + """_DockerEnvironmentProcess.recv_stderr without buffered data (no timeout).""" + container = MockContainer() + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + + stderr_data = b'error output' + header = struct.pack('>BxxxI', 2, len(stderr_data)) + mock_socket = MagicMock() + mock_socket.recv.side_effect = [header, stderr_data] + proc._socket = mock_socket + + result = await proc.recv_stderr() + assert result == stderr_data + + async def test_docker_process_recv_stream_buffers_stdout( + self, + ) -> None: + """_DockerEnvironmentProcess._recv_stream buffers stdout when stderr is wanted.""" + container = MockContainer() + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + + # First frame is stdout (type 1), second is stderr (type 2) + stdout_data = b'stdout output' + stderr_data = b'stderr output' + stdout_header = struct.pack('>BxxxI', 1, len(stdout_data)) + stderr_header = struct.pack('>BxxxI', 2, len(stderr_data)) + + mock_socket = MagicMock() + mock_socket.recv.side_effect = [stdout_header, stdout_data, stderr_header, stderr_data] + proc._socket = mock_socket + + # Requesting stderr should buffer stdout and return stderr + result = await proc.recv_stderr() + assert result == stderr_data + assert proc._stdout_buffer == [stdout_data] + + async def test_docker_process_wait_no_timeout( + self, + ) -> None: + """_DockerEnvironmentProcess.wait without timeout polls until returncode is set.""" + container = MockContainer() + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + proc._exec_id = 'exec-123' + # Mock exec_inspect to return 
"still running" first, then "exited" + call_count = 0 + + def mock_inspect(exec_id: str) -> dict[str, Any]: + nonlocal call_count + call_count += 1 + if call_count <= 1: + return {'Running': True, 'ExitCode': None} + return {'Running': False, 'ExitCode': 0} + + container.client.api.exec_inspect = mock_inspect + result = await proc.wait() + assert result == 0 + assert call_count >= 2 + + async def test_docker_process_wait_with_timeout( + self, + ) -> None: + """_DockerEnvironmentProcess.wait with timeout.""" + container = MockContainer() + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + proc._returncode = 42 + result = await proc.wait(timeout=5.0) + assert result == 42 + + async def test_docker_read_file_unicode_error( + self, mock_docker_sandbox: Any, mock_container: MockContainer + ) -> None: + """DockerEnvironment.read_file falls back to raw bytes on UnicodeDecodeError.""" + # Store a binary file (not an image extension) that will fail utf-8 decode + binary_data = b'\x80\x81\x82\xff' + mock_container._files['/workspace/data.bin'] = binary_data + + # Make the awk command return non-utf8 data to trigger UnicodeDecodeError + original = mock_container.exec_run + + def exec_with_binary(cmd: Any, **kwargs: Any) -> tuple[int, bytes]: + cmd_str = ' '.join(cmd) if isinstance(cmd, list) else cmd + if 'awk' in cmd_str and 'data.bin' in cmd_str: + return 0, b'\x80\x81\x82\xff' + return original(cmd, **kwargs) # pragma: no cover + + mock_container.exec_run = exec_with_binary # type: ignore[assignment] + result = await mock_docker_sandbox.read_file('data.bin') + assert isinstance(result, bytes) + + async def test_docker_is_alive_exception(self, mock_docker_sandbox: Any, mock_container: MockContainer) -> None: + """DockerEnvironment.is_alive returns False when reload raises.""" + mock_container.reload = MagicMock(side_effect=DockerException('connection error')) + result = await mock_docker_sandbox.is_alive() + assert result is 
False + + async def test_docker_is_alive_running(self, mock_docker_sandbox: Any) -> None: + """DockerEnvironment.is_alive returns True when running.""" + result = await mock_docker_sandbox.is_alive() + assert result is True + + async def test_docker_process_recv_with_buffered_data( + self, + ) -> None: + """_DockerEnvironmentProcess.recv returns buffered stdout data first.""" + + container = MockContainer() + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + proc._stdout_buffer.append(b'buffered data') + + result = await proc.recv() + assert result == b'buffered data' + assert proc._stdout_buffer == [] + + async def test_docker_process_recv_stderr_with_buffered_data( + self, + ) -> None: + """_DockerEnvironmentProcess.recv_stderr returns buffered stderr data first.""" + + container = MockContainer() + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + proc._stderr_buffer.append(b'buffered error') + + result = await proc.recv_stderr() + assert result == b'buffered error' + assert proc._stderr_buffer == [] + + async def test_docker_process_recv_stream_buffers_other( + self, + ) -> None: + """_DockerEnvironmentProcess._recv_stream buffers frames for the other stream.""" + + container = MockContainer() + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + + # First frame is stderr (type 2), second is stdout (type 1) + stderr_data = b'error output' + stdout_data = b'stdout output' + stderr_header = struct.pack('>BxxxI', 2, len(stderr_data)) + stdout_header = struct.pack('>BxxxI', 1, len(stdout_data)) + + mock_socket = MagicMock() + mock_socket.recv.side_effect = [stderr_header, stderr_data, stdout_header, stdout_data] + proc._socket = mock_socket + + # Requesting stdout should buffer stderr and return stdout + result = await proc.recv() + assert result == stdout_data + assert proc._stderr_buffer == [stderr_data] + + async def 
test_docker_process_recv_stream_eof( + self, + ) -> None: + """_DockerEnvironmentProcess._recv_stream returns empty on EOF.""" + + container = MockContainer() + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + + mock_socket = MagicMock() + mock_socket.recv.return_value = b'' # EOF + proc._socket = mock_socket + + result = await proc.recv() + assert result == b'' + + async def test_docker_process_kill( + self, + ) -> None: + """_DockerEnvironmentProcess.kill closes the socket.""" + + container = MockContainer() + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + mock_socket = MagicMock() + proc._socket = mock_socket + + await proc.kill() + mock_socket.close.assert_called_once() + + async def test_docker_process_kill_oserror( + self, + ) -> None: + """_DockerEnvironmentProcess.kill handles OSError.""" + + container = MockContainer() + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + mock_socket = MagicMock() + mock_socket.close.side_effect = OSError('socket error') + proc._socket = mock_socket + + # Should not raise + await proc.kill() + + async def test_docker_process_returncode( + self, + ) -> None: + """_DockerEnvironmentProcess.returncode checks exec status.""" + + container = MockContainer() + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + + # No exec_id means returncode is None + assert proc.returncode is None + + # With exec_id and cached returncode + proc._exec_id = 'exec-123' + proc._returncode = 0 + assert proc.returncode == 0 + + async def test_docker_process_returncode_from_inspect( + self, + ) -> None: + """_DockerEnvironmentProcess._poll_exit_code polls Docker API.""" + + container = MockContainer() + container.client.api.exec_inspect.return_value = {'ExitCode': 42, 'Running': False} + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # 
type: ignore[arg-type] + proc._exec_id = 'exec-123' + + # returncode only returns cached value — no I/O + assert proc.returncode is None + + # _poll_exit_code offloads the HTTP call and caches the result + rc = await proc._poll_exit_code() + assert rc == 42 + assert proc.returncode == 42 + + async def test_docker_process_returncode_still_running( + self, + ) -> None: + """_DockerEnvironmentProcess._poll_exit_code returns None when process is running.""" + + container = MockContainer() + # Docker returns ExitCode=0 + Running=True for still-running processes + container.client.api.exec_inspect.return_value = {'ExitCode': 0, 'Running': True} + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + proc._exec_id = 'exec-123' + + assert await proc._poll_exit_code() is None + assert proc.returncode is None + + async def test_docker_process_returncode_inspect_error( + self, + ) -> None: + """_DockerEnvironmentProcess._poll_exit_code handles API errors.""" + + container = MockContainer() + container.client.api.exec_inspect.side_effect = OSError('connection failed') + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + proc._exec_id = 'exec-123' + + assert await proc._poll_exit_code() is None + assert proc.returncode is None + + async def test_docker_process_send( + self, + ) -> None: + """_DockerEnvironmentProcess.send writes to socket.""" + + container = MockContainer() + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + mock_socket = MagicMock() + proc._socket = mock_socket + + await proc.send(b'hello') + mock_socket.sendall.assert_called_once_with(b'hello') + + async def test_docker_process_recv_with_timeout( + self, + ) -> None: + """_DockerEnvironmentProcess.recv with timeout.""" + + container = MockContainer() + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + + stdout_data = b'data' 
+ header = struct.pack('>BxxxI', 1, len(stdout_data)) + mock_socket = MagicMock() + mock_socket.recv.side_effect = [header, stdout_data] + proc._socket = mock_socket + + result = await proc.recv(timeout=5.0) + assert result == stdout_data + + async def test_docker_process_recv_stderr_with_timeout( + self, + ) -> None: + """_DockerEnvironmentProcess.recv_stderr with timeout.""" + + container = MockContainer() + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + + stderr_data = b'error' + header = struct.pack('>BxxxI', 2, len(stderr_data)) + mock_socket = MagicMock() + mock_socket.recv.side_effect = [header, stderr_data] + proc._socket = mock_socket + + result = await proc.recv_stderr(timeout=5.0) + assert result == stderr_data + + async def test_docker_read_frame_data_eof_during_read( + self, + ) -> None: + """_DockerEnvironmentProcess._read_frame handles EOF during data read.""" + + container = MockContainer() + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + + # Header says 100 bytes but socket returns less, then EOF + header = struct.pack('>BxxxI', 1, 100) + mock_socket = MagicMock() + mock_socket.recv.side_effect = [header, b'partial', b''] # EOF during data + proc._socket = mock_socket + + stream_type, data = proc._read_frame() + assert stream_type == 1 + assert data == b'partial' + assert proc._eof is True + + async def test_docker_process_start_with_env( + self, + ) -> None: + """_DockerEnvironmentProcess._do_start passes env to exec_create.""" + + container = MockContainer() + container.client.api.exec_create.return_value = {'Id': 'exec-test'} + mock_sock = MagicMock() + container.client.api.exec_start.return_value = mock_sock + + proc = _DockerEnvironmentProcess( + container, # type: ignore[arg-type] + 'echo test', + '/workspace', + env={'FOO': 'bar'}, + ) + await proc._start() + + assert proc._exec_id == 'exec-test' + call_kwargs = 
container.client.api.exec_create.call_args[1] + assert call_kwargs['environment'] == {'FOO': 'bar'} + + async def test_docker_process_aenter( + self, + ) -> None: + """_DockerEnvironmentProcess.__aenter__ starts the process.""" + + container = MockContainer() + container.client.api.exec_create.return_value = {'Id': 'exec-aenter'} + mock_sock = MagicMock() + container.client.api.exec_start.return_value = mock_sock + + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + entered = await proc.__aenter__() + assert entered is proc + assert proc._exec_id == 'exec-aenter' + + async def test_docker_process_poll_exit_code_no_exec_id( + self, + ) -> None: + """_DockerEnvironmentProcess._poll_exit_code returns None when _exec_id is None.""" + container = MockContainer() + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + + # _exec_id is None by default (process not started) + assert proc._exec_id is None + assert await proc._poll_exit_code() is None + + async def test_docker_process_aexit_kills_running( + self, + ) -> None: + """_DockerEnvironmentProcess.__aexit__ kills the process if still running.""" + container = MockContainer() + proc = _DockerEnvironmentProcess(container, 'echo test', '/workspace') # type: ignore[arg-type] + mock_socket = MagicMock() + proc._socket = mock_socket + # Process is "still running" — _exec_id set but inspect says Running=True + proc._exec_id = 'exec-aexit' + container.client.api.exec_inspect.return_value = {'Running': True, 'ExitCode': None} + + await proc.__aexit__(None, None, None) + + # Should have called kill (socket.close) + mock_socket.close.assert_called_once() + assert proc._returncode is None + + async def test_docker_process_aexit_already_exited( + self, + ) -> None: + """_DockerEnvironmentProcess.__aexit__ does not kill if process already exited.""" + container = MockContainer() + proc = _DockerEnvironmentProcess(container, 'echo test', 
'/workspace') # type: ignore[arg-type] + mock_socket = MagicMock() + proc._socket = mock_socket + proc._exec_id = 'exec-aexit' + container.client.api.exec_inspect.return_value = {'Running': False, 'ExitCode': 0} + + await proc.__aexit__(None, None, None) + + # Should NOT have called kill + mock_socket.close.assert_not_called() + assert proc._returncode == 0 + + async def test_mock_container_find_command( + self, + ) -> None: + """MockContainer.exec_run handles find commands for glob operations.""" + container = MockContainer() + container._files['/workspace/src/main.py'] = b'print("hello")' + container._files['/workspace/src/utils.py'] = b'# utils' + container._files['/workspace/README.md'] = b'# readme' + # File outside workdir should be skipped + container._files['/other/secret.txt'] = b'secret' + + # find with '.' searches all files relative to workdir + exit_code, output = container.exec_run(['sh', '-c', "find '.' -type f"], workdir='/workspace') + output_str = output.decode() + assert exit_code == 0 + assert './src/main.py' in output_str + assert './src/utils.py' in output_str + assert './README.md' in output_str + assert 'secret' not in output_str + + async def test_mock_container_find_command_subpath( + self, + ) -> None: + """MockContainer.exec_run handles find commands with a specific subdirectory.""" + container = MockContainer() + container._files['/workspace/src/main.py'] = b'print("hello")' + container._files['/workspace/docs/guide.md'] = b'# guide' + + exit_code, output = container.exec_run(['sh', '-c', "find 'src' -type f"], workdir='/workspace') + output_str = output.decode() + assert exit_code == 0 + assert 'src/main.py' in output_str + assert 'guide.md' not in output_str + + async def test_docker_read_file_image_not_found( + self, mock_docker_sandbox: Any, mock_container: MockContainer + ) -> None: + """DockerEnvironment.read_file raises FileNotFoundError for missing image files.""" + + def fail_get_archive(path: str) -> Any: + raise 
DockerNotFound('File not found') + + mock_container.get_archive = fail_get_archive + with pytest.raises(FileNotFoundError, match='File not found: missing.png'): + await mock_docker_sandbox.read_file('missing.png') + + # --- Additional Docker coverage: lifecycle, process, truncation --- + + async def test_docker_execute_truncation(self, mock_docker_sandbox: Any, mock_container: MockContainer) -> None: + """DockerEnvironment.execute truncates long output.""" + original = mock_container.exec_run + + def big_output(cmd: Any, **kwargs: Any) -> tuple[int, bytes]: + if isinstance(cmd, list) and 'echo' in str(cmd): + return 0, b'x' * 200_000 + return original(cmd, **kwargs) # pragma: no cover + + mock_container.exec_run = big_output # type: ignore[assignment] + result = await mock_docker_sandbox.shell('echo big') + assert len(result.output) == snapshot(200000) + + async def test_docker_execute_timeout_exit_code( + self, mock_docker_sandbox: Any, mock_container: MockContainer + ) -> None: + """DockerEnvironment.execute handles timeout exit code 124.""" + + def timeout_result(cmd: Any, **kwargs: Any) -> tuple[int, bytes]: + return 124, b'partial output' + + mock_container.exec_run = timeout_result # type: ignore[assignment] + result = await mock_docker_sandbox.shell('sleep 999', timeout=1) + assert result.exit_code == 124 + assert '[Command timed out]' in result.output + + async def test_docker_setup_teardown( + self, + ) -> None: + """DockerEnvironment._setup and _teardown with mocked Docker client.""" + sandbox = DockerEnvironment(image='python:3.12-slim') + + mock_client = MagicMock() + mock_container_obj = MagicMock() + mock_client.containers.run.return_value = mock_container_obj + + with mock_patch('pydantic_ai.environments.docker.docker') as mock_docker: + mock_docker.from_env.return_value = mock_client + sandbox._setup() + assert sandbox._container is not None + + # Teardown + sandbox._teardown() + mock_container_obj.stop.assert_called() + 
mock_container_obj.remove.assert_called() + assert sandbox._container is None + + async def test_docker_teardown_cleanup_errors( + self, + ) -> None: + """DockerEnvironment._teardown handles exceptions gracefully.""" + + sandbox = DockerEnvironment() + mock_container = MagicMock() + mock_container.stop.side_effect = DockerException('stop failed') + mock_container.remove.side_effect = DockerException('remove failed') + sandbox._container = mock_container + + # Should not raise + sandbox._teardown() + assert sandbox._container is None + + async def test_docker_setup_with_all_options( + self, + ) -> None: + """DockerEnvironment._setup passes all container options.""" + sandbox = DockerEnvironment( + image='python:3.12-slim', + env_vars={'KEY': 'val'}, + volumes={'/host': {'bind': '/container', 'mode': 'rw'}}, + memory_limit='512m', + cpu_limit=1.0, + pids_limit=256, + network_disabled=True, + read_only=True, + cap_drop=['ALL'], + security_opt=['no-new-privileges'], + user='nobody', + tmpfs={'/tmp': 'noexec,nosuid,size=64m'}, + init=True, + ) + + mock_client = MagicMock() + mock_container = MagicMock() + mock_client.containers.run.return_value = mock_container + + with mock_patch('pydantic_ai.environments.docker.docker') as mock_docker: + mock_docker.from_env.return_value = mock_client + sandbox._setup() + + call_kwargs = mock_client.containers.run.call_args[1] + assert call_kwargs['volumes'] == {'/host': {'bind': '/container', 'mode': 'rw'}} + assert call_kwargs['mem_limit'] == '512m' + assert call_kwargs['nano_cpus'] == int(1e9) + assert call_kwargs['pids_limit'] == 256 + assert call_kwargs['network_disabled'] is True + assert call_kwargs['read_only'] is True + assert call_kwargs['cap_drop'] == ['ALL'] + assert call_kwargs['security_opt'] == ['no-new-privileges'] + assert call_kwargs['user'] == 'nobody' + assert call_kwargs['tmpfs'] == {'/tmp': 'noexec,nosuid,size=64m'} + assert call_kwargs['init'] is True + + # --- Docker instantiation tests --- + + def 
test_docker_sandbox_instantiation( + self, + ): + """DockerEnvironment can be constructed without starting Docker.""" + + # Verify construction succeeds with default and custom settings + sandbox = DockerEnvironment(image='python:3.12-slim') + assert isinstance(sandbox, DockerEnvironment) + + sandbox_with_opts = DockerEnvironment( + image='node:20-slim', + memory_limit='512m', + cpu_limit=1.0, + network_disabled=True, + ) + assert isinstance(sandbox_with_opts, DockerEnvironment) + + # Verify security hardening parameters are accepted + sandbox_hardened = DockerEnvironment( + image='python:3.12-slim', + network_disabled=True, + read_only=True, + cap_drop=['ALL'], + security_opt=['no-new-privileges'], + user='nobody', + pids_limit=256, + tmpfs={'/tmp': 'noexec,nosuid,size=64m'}, + init=True, + ) + assert isinstance(sandbox_hardened, DockerEnvironment) + + def test_shell_escape(self): + assert _shell_escape('hello') == "'hello'" + assert _shell_escape("it's") == "'it'\\''s'" + assert _shell_escape('') == "''" + assert _shell_escape('a b c') == "'a b c'" + + def test_build_read_file_cmd_default(self): + cmd = _build_read_file_cmd('test.txt') + assert 'awk' in cmd + assert "'test.txt'" in cmd + assert 'NR>=1' in cmd + assert 'NR<=2000' in cmd + + def test_build_read_file_cmd_with_offset(self): + cmd = _build_read_file_cmd('file.py', offset=10, limit=50) + assert 'NR>=11' in cmd + assert 'NR<=60' in cmd + assert "'file.py'" in cmd + + def test_build_read_file_cmd_continuation_hint(self): + """_build_read_file_cmd includes a continuation hint in the awk END block.""" + cmd = _build_read_file_cmd('file.py', offset=0, limit=10) + assert 'more lines' in cmd + assert 'offset=10' in cmd + + +# --- Additional coverage: _base.py --- + + +async def test_execution_environment_aenter_aexit(): + """ExecutionEnvironment base __aenter__/__aexit__ are exercised by subclasses.""" + # MemoryEnvironment exercises the base class path + env = MemoryEnvironment() + async with env: + pass + + 
+# --- Additional coverage: _toolset.py --- + + +async def test_toolset_bash_empty_output(tmp_path: Path): + """ExecutionEnvironmentToolset bash returns just exit code when no output.""" + env = LocalEnvironment(tmp_path) + toolset = ExecutionEnvironmentToolset(env) + ctx = build_run_context() + manager = await ToolManager[None](toolset).for_run_step(ctx) + + async with env: + result = await manager.handle_call(ToolCallPart(tool_name='shell', args={'command': 'true'})) + assert 'Exit code: 0' in str(result) + + +async def test_toolset_lifecycle_error(tmp_path: Path): + """ExecutionEnvironmentToolset handles environment startup failures.""" + + class FailingEnv(LocalEnvironment): + async def __aenter__(self): + raise RuntimeError('Setup failed') + + env = FailingEnv(tmp_path) + toolset = ExecutionEnvironmentToolset(env) + with pytest.raises(RuntimeError, match='Setup failed'): + async with toolset: + pass + + +# --- Additional coverage: local.py --- + + +async def test_local_process_stdin_not_available(): + """_LocalEnvironmentProcess.send raises when stdin is None.""" + mock_proc = MagicMock() + mock_proc.stdin = None + proc = _LocalEnvironmentProcess(mock_proc) + with pytest.raises(RuntimeError, match='stdin'): + await proc.send(b'data') + + +async def test_local_process_stdout_not_available(): + """_LocalEnvironmentProcess.recv raises when stdout is None.""" + mock_proc = MagicMock() + mock_proc.stdout = None + proc = _LocalEnvironmentProcess(mock_proc) + with pytest.raises(RuntimeError, match='stdout'): + await proc.recv() + + +async def test_local_process_stderr_not_available(): + """_LocalEnvironmentProcess.recv_stderr raises when stderr is None.""" + mock_proc = MagicMock() + mock_proc.stderr = None + proc = _LocalEnvironmentProcess(mock_proc) + with pytest.raises(RuntimeError, match='stderr'): + await proc.recv_stderr() + + +async def test_local_process_recv_stderr_timeout(tmp_path: Path): + """_LocalEnvironmentProcess.recv_stderr with timeout.""" + env = 
LocalEnvironment(tmp_path) + proc = await env.create_process('python -c "import sys; sys.stderr.write(\'err\\n\')"') + async with proc: + data = await proc.recv_stderr(timeout=5.0) + assert b'err' in data + + +async def test_local_process_recv_stderr_eof(tmp_path: Path): + """_LocalEnvironmentProcess.recv_stderr returns empty on EOF.""" + env = LocalEnvironment(tmp_path) + proc = await env.create_process('echo done') + async with proc: + await proc.wait(timeout=5.0) + # After process exits, stderr should return empty + data = await proc.recv_stderr() + assert data == b'' + + +async def test_local_process_kill_terminates_sleep(tmp_path: Path): + """_LocalEnvironmentProcess.kill terminates process.""" + env = LocalEnvironment(tmp_path) + proc = await env.create_process('sleep 60') + async with proc: + await proc.kill() + # After kill, returncode should be set + + +async def test_local_read_file_bytes_directory_raises_error(tmp_path: Path): + """LocalEnvironment.read_file_bytes raises on directory.""" + (tmp_path / 'subdir').mkdir() + env = LocalEnvironment(tmp_path) + with pytest.raises(FileNotFoundError, match='directory'): + await env.read_file('subdir') + + +async def test_local_read_file_bytes_not_found(tmp_path: Path): + """LocalEnvironment.read_file_bytes raises on missing file.""" + env = LocalEnvironment(tmp_path) + with pytest.raises(FileNotFoundError, match='not found'): + await env.read_file('nonexistent.txt') + + +async def test_local_execute_output_truncation(tmp_path: Path): + """LocalEnvironment.execute truncates long output.""" + # Write a script that outputs lots of text + script = tmp_path / 'big.py' + script.write_text("print('x' * 200000)") + env = LocalEnvironment(tmp_path) + result = await env.shell(f'python {script}') + assert len(result.output) == snapshot(200001) + + +# --- Additional coverage: memory.py --- + + +async def test_memory_normalize_leading_slash_in_constructor(): + """MemoryEnvironment normalizes paths with leading /.""" + env = 
MemoryEnvironment(files={'/abs/path.txt': 'content'}) + content = await env.read_file('abs/path.txt') + assert isinstance(content, str) + assert 'content' in content + + +async def test_memory_read_file_directory_error(): + """MemoryEnvironment.read_file raises on directory paths.""" + env = MemoryEnvironment(files={'dir/file.txt': 'content'}) + with pytest.raises(FileNotFoundError, match='directory'): + await env.read_file('dir') + + +async def test_memory_read_file_bytes_not_found_raises_error(): + """MemoryEnvironment.read_file_bytes raises on missing file.""" + env = MemoryEnvironment() + with pytest.raises(FileNotFoundError): + await env.read_file('missing.txt') + + +async def test_local_process_wait_no_timeout(tmp_path: Path): + """_LocalEnvironmentProcess.wait without timeout (line 74).""" + env = LocalEnvironment(tmp_path) + proc = await env.create_process('true') + async with proc: + exit_code = await proc.wait() # no timeout + assert exit_code == 0 + + +async def test_memory_normalize_absolute_path(): + """MemoryEnvironment._normalize strips leading / (line 76).""" + env = MemoryEnvironment(files={'path.txt': 'content'}) + # Normalize /path.txt should strip leading / + normalized = env._normalize('/path.txt') + assert normalized == 'path.txt' + + +async def test_memory_read_file_that_is_also_directory_prefix(): + """MemoryEnvironment.read_file when path exists as both file and directory prefix.""" + # 'dir' exists as a file AND 'dir/child.txt' makes it look like a directory too + env = MemoryEnvironment(files={'dir': 'I am a file', 'dir/child.txt': 'child content'}) + async with env: + content = await env.read_file('dir') + assert isinstance(content, str) + assert 'I am a file' in content + + +# --- ExecutionEnvironmentToolset: environment_factory --- + + +async def test_toolset_factory_basic(): + """Factory creates a fresh environment per __aenter__.""" + envs_created: list[MemoryEnvironment] = [] + + def factory() -> MemoryEnvironment: + env = 
MemoryEnvironment() + envs_created.append(env) + return env + + toolset = ExecutionEnvironmentToolset(environment_factory=factory) + + async with toolset: + assert len(envs_created) == 1 + assert toolset.environment is envs_created[0] + + # Second entry creates a new environment + async with toolset: + assert len(envs_created) == 2 + assert toolset.environment is envs_created[1] + assert envs_created[0] is not envs_created[1] + + +async def test_toolset_factory_concurrent(): + """Concurrent __aenter__ calls get different environments.""" + import asyncio + + envs_created: list[MemoryEnvironment] = [] + + def factory() -> MemoryEnvironment: + env = MemoryEnvironment() + envs_created.append(env) + return env + + toolset = ExecutionEnvironmentToolset(environment_factory=factory) + + async def enter_and_check() -> MemoryEnvironment: + async with toolset: + env = toolset.environment + assert isinstance(env, MemoryEnvironment) + return env + + env1, env2 = await asyncio.gather(enter_and_check(), enter_and_check()) + assert len(envs_created) == 2 + assert env1 is not env2 + + +async def test_toolset_factory_concurrent_isolation(): + """Two concurrent runs each write a file and don't see each other's files.""" + import asyncio + + def factory() -> MemoryEnvironment: + return MemoryEnvironment() + + toolset = ExecutionEnvironmentToolset(environment_factory=factory) + ctx = build_run_context() + + async def write_and_read(filename: str, content: str) -> tuple[str, str]: + """Write a file, then try to read a file the other task wrote.""" + other_file = 'b.txt' if filename == 'a.txt' else 'a.txt' + async with toolset: + manager = await ToolManager[None](toolset).for_run_step(ctx) + await manager.handle_call(ToolCallPart(tool_name='write_file', args={'path': filename, 'content': content})) + # Small delay so both tasks have a chance to write + await asyncio.sleep(0.01) + other_result = await manager.handle_call(ToolCallPart(tool_name='read_file', args={'path': other_file})) + 
return content, str(other_result) + + (content_a, read_b), (content_b, read_a) = await asyncio.gather( + write_and_read('a.txt', 'alpha'), + write_and_read('b.txt', 'beta'), + ) + + assert content_a == 'alpha' + assert content_b == 'beta' + # Each run should NOT see the other's file — they have isolated environments + assert 'Error' in read_b + assert 'Error' in read_a + + +async def test_toolset_factory_cleanup(): + """__aexit__ properly cleans up factory-created environments.""" + entered = 0 + exited = 0 + + class TrackingEnv(MemoryEnvironment): + async def __aenter__(self): + nonlocal entered + entered += 1 + return await super().__aenter__() + + async def __aexit__(self, *args: Any): + nonlocal exited + exited += 1 + return await super().__aexit__(*args) + + toolset = ExecutionEnvironmentToolset(environment_factory=TrackingEnv) + + async with toolset: + assert entered == 1 + assert exited == 0 + + assert entered == 1 + assert exited == 1 + + +async def test_toolset_factory_mutual_exclusivity(): + """Passing both shared_environment and environment_factory raises ValueError.""" + env = MemoryEnvironment() + with pytest.raises(ValueError, match='Cannot provide both'): + ExecutionEnvironmentToolset(env, environment_factory=MemoryEnvironment) + + +async def test_toolset_factory_with_use_environment(): + """use_environment() overrides the factory-created environment within the context.""" + override_env = MemoryEnvironment() + + toolset = ExecutionEnvironmentToolset(environment_factory=MemoryEnvironment) + + async with toolset: + factory_env = toolset.environment + assert factory_env is not override_env + + with toolset.use_environment(override_env): + assert toolset.environment is override_env + + # After exiting use_environment, factory env is restored + assert toolset.environment is factory_env + + +# --- Memory image file stored as string --- + + +async def test_memory_read_image_stored_as_string(): + """MemoryEnvironment returns bytes for image files even when 
stored as a string.""" + env = MemoryEnvironment(files={'image.png': 'fake png data'}) + async with env: + result = await env.read_file('image.png') + assert isinstance(result, bytes) + assert result == b'fake png data' + + +# --- ExecutionEnvironmentToolset: get_tools filters by runtime capabilities --- + + +async def test_toolset_factory_filters_tools_by_capabilities(): + """When using environment_factory, get_tools() only returns tools supported by the runtime environment.""" + + class _ShellOnlyEnv(BaseEnv): + @property + def capabilities(self) -> frozenset[EnvToolName]: + return frozenset({'shell'}) + + async def shell( + self, command: str, *, timeout: float | None = None, env: dict[str, str] | None = None + ) -> ExecutionResult: + return ExecutionResult(output='', exit_code=0) # pragma: no cover + + toolset = ExecutionEnvironmentToolset(environment_factory=_ShellOnlyEnv) + # Before entering, all tools are registered (no env to check) + ctx = build_run_context() + + async with toolset: + tools = await toolset.get_tools(ctx) + + # Only shell should be exposed — the runtime env only supports shell + assert set(tools.keys()) == {'shell'} + + +async def test_toolset_use_environment_filters_tools(): + """use_environment() with a limited env filters tools from get_tools().""" + + class _ShellOnlyEnv(BaseEnv): + @property + def capabilities(self) -> frozenset[EnvToolName]: + return frozenset({'shell'}) + + # Full-capability shared env registers all tools + full_env = MemoryEnvironment() + toolset = ExecutionEnvironmentToolset(full_env) + ctx = build_run_context() + + async with full_env: + all_tools = await toolset.get_tools(ctx) + assert 'read_file' in all_tools + assert 'write_file' in all_tools + + # Override with a limited env — only shell should remain + with toolset.use_environment(_ShellOnlyEnv()): + limited_tools = await toolset.get_tools(ctx) + assert set(limited_tools.keys()) == {'shell'} + + # After exiting use_environment, all tools are back + 
restored_tools = await toolset.get_tools(ctx) + assert set(restored_tools.keys()) == set(all_tools.keys()) + + +# --- Coverage gap tests --- + + +async def test_local_recv_no_timeout(tmp_path: Path): + """_LocalEnvironmentProcess.recv without timeout returns data.""" + env = LocalEnvironment(tmp_path) + proc = await env.create_process('echo hello') + async with proc: + data = await proc.recv() # no timeout + assert b'hello' in data + + +async def test_local_recv_end_of_stream(tmp_path: Path): + """_LocalEnvironmentProcess.recv returns empty bytes at EndOfStream.""" + env = LocalEnvironment(tmp_path) + proc = await env.create_process('true') + async with proc: + await proc.wait(timeout=5) + # After process exits, reading should return empty + data = await proc.recv() + assert data == b'' + + +async def test_local_read_file_binary_non_image(tmp_path: Path): + """LocalEnvironment.read_file returns raw bytes for non-image binary files.""" + async with LocalEnvironment(tmp_path) as env: + binary_path = tmp_path / 'data.bin' + binary_path.write_bytes(b'\x80\x81\x82\xff') + result = await env.read_file('data.bin') + assert isinstance(result, bytes) + assert result == b'\x80\x81\x82\xff' diff --git a/tests/test_examples.py b/tests/test_examples.py index e57d1a162d..22c9cb8acf 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -6,6 +6,8 @@ import shutil import ssl import sys +import tempfile +import types from collections.abc import AsyncIterator, Iterable, Sequence from dataclasses import dataclass, field from inspect import FrameInfo @@ -40,6 +42,7 @@ from pydantic_ai._utils import group_by_temporal from pydantic_ai.embeddings import EmbeddingModel, infer_embedding_model from pydantic_ai.embeddings.test import TestEmbeddingModel +from pydantic_ai.environments.local import LocalEnvironment as _LocalEnvironment from pydantic_ai.exceptions import UnexpectedModelBehavior from pydantic_ai.models import KnownModelName, Model, infer_model from 
pydantic_ai.models.fallback import FallbackModel @@ -58,6 +61,35 @@ pytestmark = [ pytest.mark.skipif(not imports_successful(), reason='extras not installed'), ] + +# --------------------------------------------------------------------------- +# Mock DockerEnvironment backed by LocalEnvironment for testing doc examples +# without requiring the `docker` package or a running Docker daemon. +# --------------------------------------------------------------------------- + + +class _MockDockerEnvironment(_LocalEnvironment): + """Test stand-in for DockerEnvironment that uses LocalEnvironment under the hood.""" + + def __init__(self, **_kwargs: Any) -> None: + # Use mkdtemp (no finalizer) instead of TemporaryDirectory to avoid + # PytestUnraisableExceptionWarning when constructor-only examples + # never enter the async context manager. + self._temp_path = Path(tempfile.mkdtemp()) + super().__init__(root_dir=self._temp_path) + + @classmethod + def hardened(cls, **kwargs: Any) -> _MockDockerEnvironment: + return cls(**kwargs) + + async def __aexit__(self, *_args: Any) -> None: + shutil.rmtree(self._temp_path, ignore_errors=True) + + +_mock_docker_env_module = types.ModuleType('pydantic_ai.environments.docker') +_mock_docker_env_module.__package__ = 'pydantic_ai.environments' +_mock_docker_env_module.DockerEnvironment = _MockDockerEnvironment # type: ignore[attr-defined] + code_examples: dict[str, CodeExample] = {} @@ -170,6 +202,10 @@ def print(self, *args: Any, **kwargs: Any) -> None: except ModuleNotFoundError: pass + # Replace DockerEnvironment with a LocalEnvironment-backed mock so doc + # examples that reference Docker can run without the docker package or daemon. 
+ mocker.patch.dict(sys.modules, {'pydantic_ai.environments.docker': _mock_docker_env_module}) + env.set('OPENAI_API_KEY', 'testing') env.set('GEMINI_API_KEY', 'testing') env.set('GOOGLE_API_KEY', 'testing') @@ -573,6 +609,13 @@ async def call_tool( args={'name': 'test', 'value': 42}, tool_call_id='pyd_ai_tool_call_id', ), + # Execution environment doc examples + 'Create a Python script that prints the first 10 Fibonacci numbers, then run it.': 'Done! The first 10 Fibonacci numbers are: 0, 1, 1, 2, 3, 5, 8, 13, 21, 34', + 'Fetch https://httpbin.org/get and print the response': 'Successfully fetched the URL. The response contains request metadata including headers and origin IP.', + 'echo "running locally"': 'Command executed successfully.', + 'echo "running in Docker"': 'Command executed successfully.', + 'task A': 'Task A completed.', + 'task B': 'Task B completed.', } tool_responses: dict[tuple[str, str], str] = { diff --git a/uv.lock b/uv.lock index 61104748b2..ea27426ca7 100644 --- a/uv.lock +++ b/uv.lock @@ -7,6 +7,10 @@ resolution-markers = [ "python_full_version == '3.11.*'", "python_full_version < '3.11'", ] +conflicts = [[ + { package = "pydantic-ai-slim", extra = "huggingface" }, + { package = "pydantic-ai-slim", extra = "outlines-vllm-offline" }, +]] [manifest] members = [ @@ -23,7 +27,8 @@ name = "accelerate" version = "1.12.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "huggingface-hub" }, + { name = "huggingface-hub", version = "0.36.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "huggingface-hub", version = "1.4.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 
'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, { name = "numpy" }, { name = "packaging" }, { name = "psutil" }, @@ -73,7 +78,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohappyeyeballs" }, { name = "aiosignal" }, - { name = "async-timeout", marker = "python_full_version < '3.11'" }, + { name = "async-timeout", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "attrs" }, { name = "frozenlist" }, { name = "multidict" }, @@ -201,7 +206,7 @@ version = "1.4.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "frozenlist" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, + { name = "typing-extensions", marker = "python_full_version < '3.13' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } wheels = [ @@ -227,7 +232,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mako" }, { name = "sqlalchemy" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "tomli", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/02/a6/74c8cadc2882977d80ad756a13857857dbcf9bd405bc80b662eb10651282/alembic-1.17.2.tar.gz", hash = "sha256:bbe9751705c5e0f14877f02d46c53d10885e377e3d90eda810a016f9baa19e8e", size = 1988064, upload-time = "2025-11-14T20:35:04.057Z" } @@ -294,10 
+299,10 @@ name = "anyio" version = "4.8.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "idna" }, { name = "sniffio" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, + { name = "typing-extensions", marker = "python_full_version < '3.13' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/a3/73/199a98fc2dae33535d6b8e8e6ec01f8c1d76c9adb096c6b7d64823038cde/anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a", size = 181126, upload-time = "2025-01-05T13:13:11.095Z" } wheels = [ @@ -350,7 +355,7 @@ dependencies = [ { name = "pyyaml" }, { name = "requests" }, { name = "requests-oauthlib" }, - { name = "tzdata", marker = "sys_platform == 'win32'" }, + { name = "tzdata", marker = "sys_platform == 'win32' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/a9/a7/bb182d81f35c3fe405505f0976da4b74f942cfdd53c7193b0fe50412aa27/apprise-1.9.6.tar.gz", hash = "sha256:4206be9cb5694a3d08dd8e0393bbb9b36212ac3a7769c2633620055e75c6caef", size = 1921714, upload-time = "2025-12-07T19:24:30.587Z" } wheels = [ @@ -383,7 +388,7 @@ name = "asgiref" version = "3.11.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 
'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/76/b9/4db2509eabd14b4a8c71d1b24c8d5734c52b8560a7b1e1a8b56c8d25568b/asgiref-3.11.0.tar.gz", hash = "sha256:13acff32519542a1736223fb79a715acdebe24286d98e8b164a73085f40da2c4", size = 37969, upload-time = "2025-11-19T15:32:20.106Z" } wheels = [ @@ -425,7 +430,7 @@ name = "asyncpg" version = "0.31.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "async-timeout", marker = "python_full_version < '3.11'" }, + { name = "async-timeout", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/fe/cc/d18065ce2380d80b1bcce927c24a2642efd38918e33fd724bc4bca904877/asyncpg-0.31.0.tar.gz", hash = "sha256:c989386c83940bfbd787180f2b1519415e2d3d6277a70d9d0f0145ac73500735", size = 993667, upload-time = "2025-11-24T23:27:00.812Z" } wheels = [ @@ -621,8 +626,8 @@ dependencies = [ { name = "pathspec" }, { name = "platformdirs" }, { name = "pytokens" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, + { name = "tomli", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "typing-extensions", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/c4/d9/07b458a3f1c525ac392b5edc6b191ff140b596f9d77092429417a54e249d/black-25.12.0.tar.gz", hash = "sha256:8d3dd9cea14bff7ddc0eb243c811cdb1a011ebb4800a5f0335a01a68654796a7", size = 659264, upload-time = "2025-12-08T01:40:52.501Z" } wheels = [ @@ -770,7 +775,7 @@ source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "botocore-stubs" }, { name = "types-s3transfer" }, - { name = "typing-extensions", marker = "python_full_version < '3.12'" }, + { name = "typing-extensions", marker = "python_full_version < '3.12' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/ac/42/2a73afec394eec6350d59c4deb4bda2639f7fc0ca8dfb2a41dcc4115f07e/boto3_stubs-1.42.14.tar.gz", hash = "sha256:b06c4be79348573fa03fc7fbe4bd82ebbc7e1e27cf208c8f5ab7bfcb75f55c05", size = 101097, upload-time = "2025-12-19T20:41:44.497Z" } wheels = [ @@ -981,7 +986,7 @@ name = "cffi" version = "2.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "pycparser", marker = "implementation_name != 'PyPy'" }, + { name = "pycparser", marker = "implementation_name != 'PyPy' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } wheels = [ @@ -1171,7 +1176,7 @@ name = "click" version = "8.1.8" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "colorama", marker = "sys_platform == 'win32' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload-time = "2024-12-21T18:38:44.339Z" } 
wheels = [ @@ -1223,7 +1228,7 @@ dependencies = [ { name = "loguru", marker = "python_full_version < '3.12'" }, { name = "pydantic", marker = "python_full_version < '3.12'" }, { name = "torch", marker = "python_full_version < '3.12'" }, - { name = "transformers", marker = "python_full_version < '3.12'" }, + { name = "transformers", version = "4.57.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/fc/65/88dd1c58fb9d0ded51b5c86471b937a1525f91fad2211a6f051dc1ea822d/compressed_tensors-0.13.0.tar.gz", hash = "sha256:23893824d3498ea3f1a829f14a8fa85f9a5e76a34c711a038b8d7c619ca9a67c", size = 200995, upload-time = "2025-12-16T16:03:55.397Z" } wheels = [ @@ -1340,7 +1345,7 @@ wheels = [ [package.optional-dependencies] toml = [ - { name = "tomli", marker = "python_full_version <= '3.11'" }, + { name = "tomli", marker = "python_full_version <= '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] [[package]] @@ -1348,8 +1353,8 @@ name = "cryptography" version = "46.0.5" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, + { name = "cffi", marker = "platform_python_implementation != 'PyPy' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "typing-extensions", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 
750064, upload-time = "2026-02-10T19:18:38.255Z" } wheels = [ @@ -1496,8 +1501,8 @@ dependencies = [ { name = "docstring-parser" }, { name = "rich" }, { name = "rich-rst" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, + { name = "tomli", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "typing-extensions", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/40/99/e1b75193ee23bd10a05a3b90c065d419b1c8c18f61cae6b8218c7158f792/cyclopts-4.4.1.tar.gz", hash = "sha256:368a404926b46a49dc328a33ccd7e55ba879296a28e64a42afe2f6667704cecf", size = 159245, upload-time = "2025-12-21T13:59:02.266Z" } wheels = [ @@ -1513,7 +1518,8 @@ dependencies = [ { name = "filelock" }, { name = "fsspec", extra = ["http"] }, { name = "httpx" }, - { name = "huggingface-hub" }, + { name = "huggingface-hub", version = "0.36.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "huggingface-hub", version = "1.4.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, { name = "multiprocess" }, { name = "numpy" }, { name = "packaging" }, @@ -1678,7 +1684,7 @@ name = "docker" version = "7.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "pywin32", 
marker = "sys_platform == 'win32' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "requests" }, { name = "urllib3" }, ] @@ -1811,7 +1817,7 @@ name = "exceptiongroup" version = "1.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, + { name = "typing-extensions", marker = "python_full_version < '3.13' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" } wheels = [ @@ -1852,7 +1858,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "redis" }, { name = "sortedcontainers" }, - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/5f/f9/57464119936414d60697fcbd32f38909bb5688b616ae13de6e98384433e0/fakeredis-2.33.0.tar.gz", hash = "sha256:d7bc9a69d21df108a6451bbffee23b3eba432c21a654afc7ff2d295428ec5770", size = 175187, upload-time = "2025-12-16T19:45:52.269Z" } wheels = [ @@ -1896,13 +1902,13 @@ wheels = [ [package.optional-dependencies] standard = [ { name = "email-validator", marker = "python_full_version < '3.12'" }, - { name = "fastapi-cli", extra = ["standard"], marker = "python_full_version < '3.12'" }, + { name = "fastapi-cli", extra = ["standard"], marker = "(python_full_version < '3.12' and extra == 
'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "httpx", marker = "python_full_version < '3.12'" }, { name = "jinja2", marker = "python_full_version < '3.12'" }, { name = "pydantic-extra-types", marker = "python_full_version < '3.12'" }, { name = "pydantic-settings", marker = "python_full_version < '3.12'" }, { name = "python-multipart", marker = "python_full_version < '3.12'" }, - { name = "uvicorn", extra = ["standard"], marker = "python_full_version < '3.12'" }, + { name = "uvicorn", extra = ["standard"], marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] [[package]] @@ -1913,7 +1919,7 @@ dependencies = [ { name = "rich-toolkit", marker = "python_full_version < '3.12'" }, { name = "tomli", marker = "python_full_version < '3.11'" }, { name = "typer", marker = "python_full_version < '3.12'" }, - { name = "uvicorn", extra = ["standard"], marker = "python_full_version < '3.12'" }, + { name = "uvicorn", extra = ["standard"], marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/99/75/9407a6b452be4c988feacec9c9d2f58d8f315162a6c7258d5a649d933ebe/fastapi_cli-0.0.16.tar.gz", hash = "sha256:e8a2a1ecf7a4e062e3b2eec63ae34387d1e142d4849181d936b23c4bdfe29073", size = 19447, upload-time = "2025-11-10T19:01:07.856Z" } wheels = [ @@ -1923,7 +1929,7 @@ wheels = [ [package.optional-dependencies] standard = [ { name = "fastapi-cloud-cli", marker = "python_full_version < '3.12'" }, - { name = "uvicorn", extra = ["standard"], marker = "python_full_version 
< '3.12'" }, + { name = "uvicorn", extra = ["standard"], marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] [[package]] @@ -1933,12 +1939,12 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "fastar", marker = "python_full_version < '3.12'" }, { name = "httpx", marker = "python_full_version < '3.12'" }, - { name = "pydantic", extra = ["email"], marker = "python_full_version < '3.12'" }, + { name = "pydantic", extra = ["email"], marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "rich-toolkit", marker = "python_full_version < '3.12'" }, { name = "rignore", marker = "python_full_version < '3.12'" }, { name = "sentry-sdk", marker = "python_full_version < '3.12'" }, { name = "typer", marker = "python_full_version < '3.12'" }, - { name = "uvicorn", extra = ["standard"], marker = "python_full_version < '3.12'" }, + { name = "uvicorn", extra = ["standard"], marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/cf/0d/3b0d2991f481c122c552b4ae38a8b400a75ab0edbc85536f2a6224f72da2/fastapi_cloud_cli-0.7.0.tar.gz", hash = "sha256:8b025944475c3d53262105886dfe051f46383e4f287787a46892b524922ac0b6", size = 30906, upload-time = "2025-12-16T12:51:49.082Z" } wheels = [ @@ -2146,6 +2152,7 @@ version = "0.8.3" source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/73/b1/1c3d635d955f2b4bf34d45abf8f35492e04dbd7804e94ce65d9f928ef3ec/fastrlock-0.8.3.tar.gz", hash = "sha256:4af6734d92eaa3ab4373e6c9a1dd0d5ad1304e172b1521733c6c3b3d73c8fa5d", size = 79327, upload-time = "2024-12-17T11:03:39.638Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/02/3f771177380d8690812d5b2b7736dc6b6c8cd1c317e4572e65f823eede08/fastrlock-0.8.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:cc5fa9166e05409f64a804d5b6d01af670979cdb12cd2594f555cb33cdc155bd", size = 55094, upload-time = "2024-12-17T11:01:49.721Z" }, { url = "https://files.pythonhosted.org/packages/be/b4/aae7ed94b8122c325d89eb91336084596cebc505dc629b795fcc9629606d/fastrlock-0.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:7a77ebb0a24535ef4f167da2c5ee35d9be1e96ae192137e9dc3ff75b8dfc08a5", size = 48220, upload-time = "2024-12-17T11:01:51.071Z" }, { url = "https://files.pythonhosted.org/packages/96/87/9807af47617fdd65c68b0fcd1e714542c1d4d3a1f1381f591f1aa7383a53/fastrlock-0.8.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:d51f7fb0db8dab341b7f03a39a3031678cf4a98b18533b176c533c122bfce47d", size = 49551, upload-time = "2024-12-17T11:01:52.316Z" }, { url = "https://files.pythonhosted.org/packages/9d/12/e201634810ac9aee59f93e3953cb39f98157d17c3fc9d44900f1209054e9/fastrlock-0.8.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:767ec79b7f6ed9b9a00eb9ff62f2a51f56fdb221c5092ab2dadec34a9ccbfc6e", size = 49398, upload-time = "2024-12-17T11:01:53.514Z" }, @@ -2153,6 +2160,7 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b5/9e/1ae90829dd40559ab104e97ebe74217d9da794c4bb43016da8367ca7a596/fastrlock-0.8.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:92577ff82ef4a94c5667d6d2841f017820932bc59f31ffd83e4a2c56c1738f90", size = 52495, upload-time = "2024-12-17T11:01:57.76Z" }, { 
url = "https://files.pythonhosted.org/packages/e5/8c/5e746ee6f3d7afbfbb0d794c16c71bfd5259a4e3fb1dda48baf31e46956c/fastrlock-0.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3df8514086e16bb7c66169156a8066dc152f3be892c7817e85bf09a27fa2ada2", size = 51972, upload-time = "2024-12-17T11:02:01.384Z" }, { url = "https://files.pythonhosted.org/packages/76/a7/8b91068f00400931da950f143fa0f9018bd447f8ed4e34bed3fe65ed55d2/fastrlock-0.8.3-cp310-cp310-win_amd64.whl", hash = "sha256:001fd86bcac78c79658bac496e8a17472d64d558cd2227fdc768aa77f877fe40", size = 30946, upload-time = "2024-12-17T11:02:03.491Z" }, + { url = "https://files.pythonhosted.org/packages/90/9e/647951c579ef74b6541493d5ca786d21a0b2d330c9514ba2c39f0b0b0046/fastrlock-0.8.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:f68c551cf8a34b6460a3a0eba44bd7897ebfc820854e19970c52a76bf064a59f", size = 55233, upload-time = "2024-12-17T11:02:04.795Z" }, { url = "https://files.pythonhosted.org/packages/be/91/5f3afba7d14b8b7d60ac651375f50fff9220d6ccc3bef233d2bd74b73ec7/fastrlock-0.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:55d42f6286b9d867370af4c27bc70d04ce2d342fe450c4a4fcce14440514e695", size = 48911, upload-time = "2024-12-17T11:02:06.173Z" }, { url = "https://files.pythonhosted.org/packages/d5/7a/e37bd72d7d70a8a551b3b4610d028bd73ff5d6253201d5d3cf6296468bee/fastrlock-0.8.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:bbc3bf96dcbd68392366c477f78c9d5c47e5d9290cb115feea19f20a43ef6d05", size = 50357, upload-time = "2024-12-17T11:02:07.418Z" }, { url = "https://files.pythonhosted.org/packages/0d/ef/a13b8bab8266840bf38831d7bf5970518c02603d00a548a678763322d5bf/fastrlock-0.8.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:77ab8a98417a1f467dafcd2226718f7ca0cf18d4b64732f838b8c2b3e4b55cb5", size = 50222, upload-time = "2024-12-17T11:02:08.745Z" }, @@ -2160,11 +2168,13 
@@ wheels = [ { url = "https://files.pythonhosted.org/packages/c0/8f/65907405a8cdb2fc8beaf7d09a9a07bb58deff478ff391ca95be4f130b70/fastrlock-0.8.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c9d459ce344c21ff03268212a1845aa37feab634d242131bc16c2a2355d5f65", size = 53362, upload-time = "2024-12-17T11:02:12.476Z" }, { url = "https://files.pythonhosted.org/packages/ec/b9/ae6511e52738ba4e3a6adb7c6a20158573fbc98aab448992ece25abb0b07/fastrlock-0.8.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33e6fa4af4f3af3e9c747ec72d1eadc0b7ba2035456c2afb51c24d9e8a56f8fd", size = 52836, upload-time = "2024-12-17T11:02:13.74Z" }, { url = "https://files.pythonhosted.org/packages/88/3e/c26f8192c93e8e43b426787cec04bb46ac36e72b1033b7fe5a9267155fdf/fastrlock-0.8.3-cp311-cp311-win_amd64.whl", hash = "sha256:5e5f1665d8e70f4c5b4a67f2db202f354abc80a321ce5a26ac1493f055e3ae2c", size = 31046, upload-time = "2024-12-17T11:02:15.033Z" }, + { url = "https://files.pythonhosted.org/packages/00/df/56270f2e10c1428855c990e7a7e5baafa9e1262b8e789200bd1d047eb501/fastrlock-0.8.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:8cb2cf04352ea8575d496f31b3b88c42c7976e8e58cdd7d1550dfba80ca039da", size = 55727, upload-time = "2024-12-17T11:02:17.26Z" }, { url = "https://files.pythonhosted.org/packages/57/21/ea1511b0ef0d5457efca3bf1823effb9c5cad4fc9dca86ce08e4d65330ce/fastrlock-0.8.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:85a49a1f1e020097d087e1963e42cea6f307897d5ebe2cb6daf4af47ffdd3eed", size = 52201, upload-time = "2024-12-17T11:02:19.512Z" }, { url = "https://files.pythonhosted.org/packages/80/07/cdecb7aa976f34328372f1c4efd6c9dc1b039b3cc8d3f38787d640009a25/fastrlock-0.8.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5f13ec08f1adb1aa916c384b05ecb7dbebb8df9ea81abd045f60941c6283a670", size = 53924, upload-time = "2024-12-17T11:02:20.85Z" }, { url = 
"https://files.pythonhosted.org/packages/88/6d/59c497f8db9a125066dd3a7442fab6aecbe90d6fec344c54645eaf311666/fastrlock-0.8.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0ea4e53a04980d646def0f5e4b5e8bd8c7884288464acab0b37ca0c65c482bfe", size = 52140, upload-time = "2024-12-17T11:02:22.263Z" }, { url = "https://files.pythonhosted.org/packages/62/04/9138943c2ee803d62a48a3c17b69de2f6fa27677a6896c300369e839a550/fastrlock-0.8.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:38340f6635bd4ee2a4fb02a3a725759fe921f2ca846cb9ca44531ba739cc17b4", size = 53261, upload-time = "2024-12-17T11:02:24.418Z" }, { url = "https://files.pythonhosted.org/packages/e2/4b/db35a52589764c7745a613b6943bbd018f128d42177ab92ee7dde88444f6/fastrlock-0.8.3-cp312-cp312-win_amd64.whl", hash = "sha256:da06d43e1625e2ffddd303edcd6d2cd068e1c486f5fd0102b3f079c44eb13e2c", size = 31235, upload-time = "2024-12-17T11:02:25.708Z" }, + { url = "https://files.pythonhosted.org/packages/92/74/7b13d836c3f221cff69d6f418f46c2a30c4b1fe09a8ce7db02eecb593185/fastrlock-0.8.3-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:5264088185ca8e6bc83181dff521eee94d078c269c7d557cc8d9ed5952b7be45", size = 54157, upload-time = "2024-12-17T11:02:29.196Z" }, { url = "https://files.pythonhosted.org/packages/06/77/f06a907f9a07d26d0cca24a4385944cfe70d549a2c9f1c3e3217332f4f12/fastrlock-0.8.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a98ba46b3e14927550c4baa36b752d0d2f7387b8534864a8767f83cce75c160", size = 50954, upload-time = "2024-12-17T11:02:32.12Z" }, { url = "https://files.pythonhosted.org/packages/f9/4e/94480fb3fd93991dd6f4e658b77698edc343f57caa2870d77b38c89c2e3b/fastrlock-0.8.3-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbdea6deeccea1917c6017d353987231c4e46c93d5338ca3e66d6cd88fbce259", size = 52535, upload-time = "2024-12-17T11:02:33.402Z" }, { url = 
"https://files.pythonhosted.org/packages/7d/a7/ee82bb55b6c0ca30286dac1e19ee9417a17d2d1de3b13bb0f20cefb86086/fastrlock-0.8.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c6e5bfecbc0d72ff07e43fed81671747914d6794e0926700677ed26d894d4f4f", size = 50942, upload-time = "2024-12-17T11:02:34.688Z" }, @@ -2477,14 +2487,15 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiofiles" }, { name = "anyio" }, - { name = "audioop-lts", marker = "python_full_version >= '3.13'" }, + { name = "audioop-lts", marker = "python_full_version >= '3.13' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "brotli" }, { name = "fastapi" }, { name = "ffmpy" }, { name = "gradio-client" }, { name = "groovy" }, { name = "httpx" }, - { name = "huggingface-hub" }, + { name = "huggingface-hub", version = "0.36.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "huggingface-hub", version = "1.4.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, { name = "jinja2" }, { name = "markupsafe" }, { name = "numpy" }, @@ -2516,7 +2527,8 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "fsspec" }, { name = "httpx" }, - { name = "huggingface-hub" }, + { name = "huggingface-hub", version = "0.36.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + 
{ name = "huggingface-hub", version = "1.4.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, { name = "packaging" }, { name = "typing-extensions" }, ] @@ -2864,8 +2876,8 @@ wheels = [ [package.optional-dependencies] brotli = [ - { name = "brotli", marker = "platform_python_implementation == 'CPython'" }, - { name = "brotlicffi", marker = "platform_python_implementation != 'CPython'" }, + { name = "brotli", marker = "platform_python_implementation == 'CPython' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "brotlicffi", marker = "platform_python_implementation != 'CPython' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] http2 = [ { name = "h2" }, @@ -2885,23 +2897,52 @@ wheels = [ [[package]] name = "huggingface-hub" -version = "1.3.4" +version = "0.36.2" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.11.*'", + "python_full_version < '3.11'", +] dependencies = [ - { name = "filelock" }, - { name = "fsspec" }, - { name = "hf-xet", marker = "platform_machine == 'AMD64' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'arm64' or platform_machine == 'x86_64'" }, - { name = "httpx" }, - { name = "packaging" }, - { name = "pyyaml" }, - { name = "shellingham" }, - { name = "tqdm" }, - { name = "typer-slim" }, - { name = "typing-extensions" }, + { name = "filelock", marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "fsspec", marker = "(python_full_version < '3.12' and extra == 
'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "hf-xet", marker = "(python_full_version < '3.12' and platform_machine == 'aarch64' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (python_full_version < '3.12' and platform_machine == 'amd64' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (python_full_version < '3.12' and platform_machine == 'arm64' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (python_full_version < '3.12' and platform_machine == 'x86_64' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (platform_machine != 'aarch64' and platform_machine != 'amd64' and platform_machine != 'arm64' and platform_machine != 'x86_64' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (platform_machine == 'aarch64' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (platform_machine == 'amd64' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (platform_machine == 'arm64' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (platform_machine == 'x86_64' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "packaging", marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "pyyaml", marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' 
and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "requests", marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "tqdm", marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "typing-extensions", marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/af/25/74af9d16cd59ae15b12467a79a84aa0fe24be4aba68fc4da0c1864d49c17/huggingface_hub-1.3.4.tar.gz", hash = "sha256:c20d5484a611b7b7891d272e8fc9f77d5de025b0480bdacfa858efb3780b455f", size = 627683, upload-time = "2026-01-26T14:05:10.656Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7c/b7/8cb61d2eece5fb05a83271da168186721c450eb74e3c31f7ef3169fa475b/huggingface_hub-0.36.2.tar.gz", hash = "sha256:1934304d2fb224f8afa3b87007d58501acfda9215b334eed53072dd5e815ff7a", size = 649782, upload-time = "2026-02-06T09:24:13.098Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/55/07/3d0c34c345043c6a398a5882e196b2220dc5861adfa18322448b90908f26/huggingface_hub-1.3.4-py3-none-any.whl", hash = "sha256:a0c526e76eb316e96a91e8a1a7a93cf66b0dd210be1a17bd5fc5ae53cba76bfd", size = 536611, upload-time = "2026-01-26T14:05:08.549Z" }, + { url = "https://files.pythonhosted.org/packages/a8/af/48ac8483240de756d2438c380746e7130d1c6f75802ef22f3c6d49982787/huggingface_hub-0.36.2-py3-none-any.whl", hash = "sha256:48f0c8eac16145dfce371e9d2d7772854a4f591bcb56c9cf548accf531d54270", size = 566395, upload-time = 
"2026-02-06T09:24:11.133Z" }, +] + +[[package]] +name = "huggingface-hub" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.13'", + "python_full_version == '3.12.*'", + "python_full_version == '3.11.*'", + "python_full_version < '3.11'", +] +dependencies = [ + { name = "filelock", marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, + { name = "fsspec", marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, + { name = "hf-xet", marker = "(python_full_version >= '3.12' and platform_machine == 'AMD64') or (python_full_version >= '3.12' and platform_machine == 'aarch64') or (python_full_version >= '3.12' and platform_machine == 'amd64') or (python_full_version >= '3.12' and platform_machine == 'arm64') or (python_full_version >= '3.12' and platform_machine == 'x86_64') or (platform_machine != 'AMD64' and platform_machine != 'aarch64' and platform_machine != 'amd64' and platform_machine != 'arm64' and platform_machine != 'x86_64' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (platform_machine == 'AMD64' and extra == 'extra-16-pydantic-ai-slim-huggingface') or (platform_machine == 'AMD64' and extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (platform_machine == 'aarch64' and extra == 'extra-16-pydantic-ai-slim-huggingface') or (platform_machine == 'aarch64' and extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (platform_machine == 'amd64' and extra == 'extra-16-pydantic-ai-slim-huggingface') or (platform_machine == 'amd64' and extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (platform_machine == 'arm64' and extra == 'extra-16-pydantic-ai-slim-huggingface') or (platform_machine == 
'arm64' and extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (platform_machine == 'x86_64' and extra == 'extra-16-pydantic-ai-slim-huggingface') or (platform_machine == 'x86_64' and extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "httpx", marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, + { name = "packaging", marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, + { name = "pyyaml", marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, + { name = "shellingham", marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, + { name = "tqdm", marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, + { name = "typer-slim", marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, + { name = "typing-extensions", marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c4/fc/eb9bc06130e8bbda6a616e1b80a7aa127681c448d6b49806f61db2670b61/huggingface_hub-1.4.1.tar.gz", hash = "sha256:b41131ec35e631e7383ab26d6146b8d8972abc8b6309b963b306fbcca87f5ed5", size = 642156, upload-time = "2026-02-06T09:20:03.013Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/d5/ae/2f6d96b4e6c5478d87d606a1934b5d436c4a2bce6bb7c6fdece891c128e3/huggingface_hub-1.4.1-py3-none-any.whl", hash = "sha256:9931d075fb7a79af5abc487106414ec5fba2c0ae86104c0c62fd6cae38873d18", size = 553326, upload-time = "2026-02-06T09:20:00.728Z" }, ] [[package]] @@ -3052,7 +3093,7 @@ dependencies = [ { name = "executing" }, { name = "pytest" }, { name = "rich" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "tomli", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/1c/b1/52b5ee59f73ed31d5fe21b10881bf2d121d07d54b23c0b6b74186792e620/inline_snapshot-0.31.1.tar.gz", hash = "sha256:4ea5ed70aa1d652713bbfd750606b94bd8a42483f7d3680433b3e92994495f64", size = 2606338, upload-time = "2025-11-07T07:36:18.932Z" } wheels = [ @@ -3094,7 +3135,7 @@ name = "jaraco-context" version = "6.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "backports-tarfile", marker = "python_full_version < '3.12'" }, + { name = "backports-tarfile", marker = "python_full_version < '3.12' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/cb/9c/a788f5bb29c61e456b8ee52ce76dbdd32fd72cd73dd67bc95f42c7a8d13c/jaraco_context-6.1.0.tar.gz", hash = "sha256:129a341b0a85a7db7879e22acd66902fda67882db771754574338898b2d5d86f", size = 15850, upload-time = "2026-01-13T02:53:53.847Z" } wheels = [ @@ -3342,13 +3383,13 @@ name = "keyring" version = "25.7.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "importlib-metadata", marker = "python_full_version < '3.12'" }, + { name = "importlib-metadata", marker = "python_full_version < '3.12' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and 
extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "jaraco-classes" }, { name = "jaraco-context" }, { name = "jaraco-functools" }, - { name = "jeepney", marker = "sys_platform == 'linux'" }, - { name = "pywin32-ctypes", marker = "sys_platform == 'win32'" }, - { name = "secretstorage", marker = "sys_platform == 'linux'" }, + { name = "jeepney", marker = "sys_platform == 'linux' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "pywin32-ctypes", marker = "sys_platform == 'win32' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "secretstorage", marker = "sys_platform == 'linux' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/43/4b/674af6ef2f97d56f0ab5153bf0bfa28ccb6c3ed4d1babf4305449668807b/keyring-25.7.0.tar.gz", hash = "sha256:fe01bd85eb3f8fb3dd0405defdeac9a5b4f6f0439edbb3149577f244a2e8245b", size = 63516, upload-time = "2025-11-16T16:26:09.482Z" } wheels = [ @@ -3392,7 +3433,7 @@ version = "0.6.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx" }, - { name = "orjson", marker = "platform_python_implementation != 'PyPy'" }, + { name = "orjson", marker = "platform_python_implementation != 'PyPy' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "packaging" }, { name = "pydantic" }, { name = "requests" }, @@ -3505,9 +3546,12 @@ version = "1.3.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/95/48/3f7a9d3ff1b36bba92b5107a3a21286821227afe9ea464736133994d61fb/llguidance-1.3.0.tar.gz", hash = "sha256:861249afd51dc325646834462ea827e57a5c2b2042e108e6aae7059fdad9104d", size = 1070460, 
upload-time = "2025-10-20T19:58:44.164Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/33/be5acb85cd8cdc4afde33d9c234eece9f318e087920255af3c05864cd3e7/llguidance-1.3.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:f7685222660a762e481ac633d49cc559c64980fe2ee59c8f932a5bb5cbc0c2c2", size = 3220647, upload-time = "2025-10-20T19:58:42.542Z" }, { url = "https://files.pythonhosted.org/packages/82/e6/b48bda5b15efeaeb62bd0dba8fc6a01d4ae5457a85dbb5d18632385fe15c/llguidance-1.3.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:098030ff0687261a3f1bd54cf21fe951fc861d56d37a0671250dd36677eaf224", size = 3099830, upload-time = "2025-10-20T19:58:40.826Z" }, { url = "https://files.pythonhosted.org/packages/aa/11/44389d3d1526d7a5c38ffd587a5ebc61d7bee443ac1dea95f2089ad58f5f/llguidance-1.3.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f6caca5d78db7f76e1fbb0fff8607b861c32d47fa3d5dee2fc49de27ee269df", size = 2835242, upload-time = "2025-10-20T19:58:34.518Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ca/53ea256396405e4dee70d5a4a35e18543408e18bb16b251d6ca6b5d80310/llguidance-1.3.0-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0612bb3f034d2487b6e8f9561f02a94a6039d88273bf0c5c539a3bd3895e47d2", size = 3297480, upload-time = "2025-10-20T19:58:37.033Z" }, { url = "https://files.pythonhosted.org/packages/83/a8/1ff2bedb8f9acb46a2d2d603415d272bb622c142ea86f5b95445cc6e366c/llguidance-1.3.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc17e9dd602c3879bf91664a64bf72f54c74dbfbeb24ccfab6a5fe435b12f7aa", size = 3033133, upload-time = "2025-10-20T19:58:38.721Z" }, + { url = "https://files.pythonhosted.org/packages/d7/a7/9b8086c0cfdddf3f6d47b173a404fa7ac46272f7affbee082c36740f4f1c/llguidance-1.3.0-cp39-abi3-win32.whl", hash = "sha256:2f6f558485a43e273fc5c6c974a9a3ace5d5e170076db9b40e0560e41c3ff18f", size = 2598109, upload-time = "2025-10-20T19:58:47.656Z" }, { url = 
"https://files.pythonhosted.org/packages/5a/7e/809349638231f469b9056c0e1bfd924d5ef5558b3b3ec72d093b6fad33b1/llguidance-1.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:1d1cd1c8618d1a13605d3e057c978651e551c8c469b481ee4041f1d6c436002d", size = 2789946, upload-time = "2025-10-20T19:58:45.958Z" }, ] @@ -3515,10 +3559,6 @@ wheels = [ name = "llvmlite" version = "0.44.0" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version == '3.11.*'", - "python_full_version < '3.11'", -] sdist = { url = "https://files.pythonhosted.org/packages/89/6a/95a3d3610d5c75293d5dbbb2a76480d5d4eeba641557b69fe90af6c5b84e/llvmlite-0.44.0.tar.gz", hash = "sha256:07667d66a5d150abed9157ab6c0b9393c9356f229784a4385c02f99e94fc94d4", size = 171880, upload-time = "2025-01-20T11:14:41.342Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/41/75/d4863ddfd8ab5f6e70f4504cf8cc37f4e986ec6910f4ef8502bb7d3c1c71/llvmlite-0.44.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:9fbadbfba8422123bab5535b293da1cf72f9f478a65645ecd73e781f962ca614", size = 28132306, upload-time = "2025-01-20T11:12:18.634Z" }, @@ -3543,38 +3583,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d0/81/e66fc86539293282fd9cb7c9417438e897f369e79ffb62e1ae5e5154d4dd/llvmlite-0.44.0-cp313-cp313-win_amd64.whl", hash = "sha256:2fb7c4f2fb86cbae6dca3db9ab203eeea0e22d73b99bc2341cdf9de93612e930", size = 30331193, upload-time = "2025-01-20T11:14:38.578Z" }, ] -[[package]] -name = "llvmlite" -version = "0.46.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.13'", - "python_full_version == '3.12.*'", -] -sdist = { url = "https://files.pythonhosted.org/packages/74/cd/08ae687ba099c7e3d21fe2ea536500563ef1943c5105bf6ab4ee3829f68e/llvmlite-0.46.0.tar.gz", hash = "sha256:227c9fd6d09dce2783c18b754b7cd9d9b3b3515210c46acc2d3c5badd9870ceb", size = 193456, upload-time = "2025-12-08T18:15:36.295Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/3d/a4/3959e1c61c5ca9db7921e5fd115b344c29b9d57a5dadd87bef97963ca1a5/llvmlite-0.46.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4323177e936d61ae0f73e653e2e614284d97d14d5dd12579adc92b6c2b0597b0", size = 37232766, upload-time = "2025-12-08T18:14:34.765Z" }, - { url = "https://files.pythonhosted.org/packages/c2/a5/a4d916f1015106e1da876028606a8e87fd5d5c840f98c87bc2d5153b6a2f/llvmlite-0.46.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a2d461cb89537b7c20feb04c46c32e12d5ad4f0896c9dfc0f60336219ff248e", size = 56275176, upload-time = "2025-12-08T18:14:37.944Z" }, - { url = "https://files.pythonhosted.org/packages/79/7f/a7f2028805dac8c1a6fae7bda4e739b7ebbcd45b29e15bf6d21556fcd3d5/llvmlite-0.46.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b1f6595a35b7b39c3518b85a28bf18f45e075264e4b2dce3f0c2a4f232b4a910", size = 55128629, upload-time = "2025-12-08T18:14:41.674Z" }, - { url = "https://files.pythonhosted.org/packages/b2/bc/4689e1ba0c073c196b594471eb21be0aa51d9e64b911728aa13cd85ef0ae/llvmlite-0.46.0-cp310-cp310-win_amd64.whl", hash = "sha256:e7a34d4aa6f9a97ee006b504be6d2b8cb7f755b80ab2f344dda1ef992f828559", size = 38138651, upload-time = "2025-12-08T18:14:45.845Z" }, - { url = "https://files.pythonhosted.org/packages/7a/a1/2ad4b2367915faeebe8447f0a057861f646dbf5fbbb3561db42c65659cf3/llvmlite-0.46.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:82f3d39b16f19aa1a56d5fe625883a6ab600d5cc9ea8906cca70ce94cabba067", size = 37232766, upload-time = "2025-12-08T18:14:48.836Z" }, - { url = "https://files.pythonhosted.org/packages/12/b5/99cf8772fdd846c07da4fd70f07812a3c8fd17ea2409522c946bb0f2b277/llvmlite-0.46.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a3df43900119803bbc52720e758c76f316a9a0f34612a886862dfe0a5591a17e", size = 56275175, upload-time = "2025-12-08T18:14:51.604Z" }, - { url = 
"https://files.pythonhosted.org/packages/38/f2/ed806f9c003563732da156139c45d970ee435bd0bfa5ed8de87ba972b452/llvmlite-0.46.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de183fefc8022d21b0aa37fc3e90410bc3524aed8617f0ff76732fc6c3af5361", size = 55128630, upload-time = "2025-12-08T18:14:55.107Z" }, - { url = "https://files.pythonhosted.org/packages/19/0c/8f5a37a65fc9b7b17408508145edd5f86263ad69c19d3574e818f533a0eb/llvmlite-0.46.0-cp311-cp311-win_amd64.whl", hash = "sha256:e8b10bc585c58bdffec9e0c309bb7d51be1f2f15e169a4b4d42f2389e431eb93", size = 38138652, upload-time = "2025-12-08T18:14:58.171Z" }, - { url = "https://files.pythonhosted.org/packages/2b/f8/4db016a5e547d4e054ff2f3b99203d63a497465f81ab78ec8eb2ff7b2304/llvmlite-0.46.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6b9588ad4c63b4f0175a3984b85494f0c927c6b001e3a246a3a7fb3920d9a137", size = 37232767, upload-time = "2025-12-08T18:15:00.737Z" }, - { url = "https://files.pythonhosted.org/packages/aa/85/4890a7c14b4fa54400945cb52ac3cd88545bbdb973c440f98ca41591cdc5/llvmlite-0.46.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3535bd2bb6a2d7ae4012681ac228e5132cdb75fefb1bcb24e33f2f3e0c865ed4", size = 56275176, upload-time = "2025-12-08T18:15:03.936Z" }, - { url = "https://files.pythonhosted.org/packages/6a/07/3d31d39c1a1a08cd5337e78299fca77e6aebc07c059fbd0033e3edfab45c/llvmlite-0.46.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4cbfd366e60ff87ea6cc62f50bc4cd800ebb13ed4c149466f50cf2163a473d1e", size = 55128630, upload-time = "2025-12-08T18:15:07.196Z" }, - { url = "https://files.pythonhosted.org/packages/2a/6b/d139535d7590a1bba1ceb68751bef22fadaa5b815bbdf0e858e3875726b2/llvmlite-0.46.0-cp312-cp312-win_amd64.whl", hash = "sha256:398b39db462c39563a97b912d4f2866cd37cba60537975a09679b28fbbc0fb38", size = 38138940, upload-time = "2025-12-08T18:15:10.162Z" }, - { url = 
"https://files.pythonhosted.org/packages/e6/ff/3eba7eb0aed4b6fca37125387cd417e8c458e750621fce56d2c541f67fa8/llvmlite-0.46.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:30b60892d034bc560e0ec6654737aaa74e5ca327bd8114d82136aa071d611172", size = 37232767, upload-time = "2025-12-08T18:15:13.22Z" }, - { url = "https://files.pythonhosted.org/packages/0e/54/737755c0a91558364b9200702c3c9c15d70ed63f9b98a2c32f1c2aa1f3ba/llvmlite-0.46.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6cc19b051753368a9c9f31dc041299059ee91aceec81bd57b0e385e5d5bf1a54", size = 56275176, upload-time = "2025-12-08T18:15:16.339Z" }, - { url = "https://files.pythonhosted.org/packages/e6/91/14f32e1d70905c1c0aa4e6609ab5d705c3183116ca02ac6df2091868413a/llvmlite-0.46.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bca185892908f9ede48c0acd547fe4dc1bafefb8a4967d47db6cf664f9332d12", size = 55128629, upload-time = "2025-12-08T18:15:19.493Z" }, - { url = "https://files.pythonhosted.org/packages/4a/a7/d526ae86708cea531935ae777b6dbcabe7db52718e6401e0fb9c5edea80e/llvmlite-0.46.0-cp313-cp313-win_amd64.whl", hash = "sha256:67438fd30e12349ebb054d86a5a1a57fd5e87d264d2451bcfafbbbaa25b82a35", size = 38138941, upload-time = "2025-12-08T18:15:22.536Z" }, - { url = "https://files.pythonhosted.org/packages/95/ae/af0ffb724814cc2ea64445acad05f71cff5f799bb7efb22e47ee99340dbc/llvmlite-0.46.0-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:d252edfb9f4ac1fcf20652258e3f102b26b03eef738dc8a6ffdab7d7d341d547", size = 37232768, upload-time = "2025-12-08T18:15:25.055Z" }, - { url = "https://files.pythonhosted.org/packages/c9/19/5018e5352019be753b7b07f7759cdabb69ca5779fea2494be8839270df4c/llvmlite-0.46.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:379fdd1c59badeff8982cb47e4694a6143bec3bb49aa10a466e095410522064d", size = 56275173, upload-time = "2025-12-08T18:15:28.109Z" }, - { url = 
"https://files.pythonhosted.org/packages/9f/c9/d57877759d707e84c082163c543853245f91b70c804115a5010532890f18/llvmlite-0.46.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2e8cbfff7f6db0fa2c771ad24154e2a7e457c2444d7673e6de06b8b698c3b269", size = 55128628, upload-time = "2025-12-08T18:15:31.098Z" }, - { url = "https://files.pythonhosted.org/packages/30/a8/e61a8c2b3cc7a597073d9cde1fcbb567e9d827f1db30c93cf80422eac70d/llvmlite-0.46.0-cp314-cp314-win_amd64.whl", hash = "sha256:7821eda3ec1f18050f981819756631d60b6d7ab1a6cf806d9efefbe3f4082d61", size = 39153056, upload-time = "2025-12-08T18:15:33.938Z" }, -] - [[package]] name = "lm-format-enforcer" version = "0.11.3" @@ -3601,7 +3609,7 @@ dependencies = [ { name = "opentelemetry-sdk" }, { name = "protobuf" }, { name = "rich" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "tomli", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/e2/60/b8040db3598a55da64c45e3e689f2baa87389a4648a6f46ba80be3329f23/logfire-4.16.0.tar.gz", hash = "sha256:03a3ab8fdc13399309cb55d69cba7a6fcbad3526cfad85fc4f72e7d75e22b654", size = 550759, upload-time = "2025-12-04T16:16:39.477Z" } @@ -3940,12 +3948,12 @@ dependencies = [ { name = "pydantic-settings" }, { name = "pyjwt", extra = ["crypto"] }, { name = "python-multipart" }, - { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "pywin32", marker = "sys_platform == 'win32' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "sse-starlette" }, { name = "starlette" }, { name = "typing-extensions" }, { name = "typing-inspection" }, - { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, + { name = "uvicorn", marker = "sys_platform != 
'emscripten' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/d5/2d/649d80a0ecf6a1f82632ca44bec21c0461a9d9fc8934d38cb5b319f2db5e/mcp-1.25.0.tar.gz", hash = "sha256:56310361ebf0364e2d438e5b45f7668cbb124e158bb358333cd06e49e83a6802", size = 605387, upload-time = "2025-12-19T10:19:56.985Z" } wheels = [ @@ -3964,7 +3972,7 @@ version = "0.7.22" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "tomli", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/fc/eb/b5cbf2484411af039a3d4aeb53a5160fae25dd8c84af6a4243bc2f3fedb3/mdformat-0.7.22.tar.gz", hash = "sha256:eef84fa8f233d3162734683c2a8a6222227a229b9206872e6139658d99acb1ea", size = 34610, upload-time = "2025-01-30T18:00:51.418Z" } wheels = [ @@ -4011,7 +4019,7 @@ dependencies = [ { name = "numpy", marker = "python_full_version < '3.12'" }, { name = "pillow", marker = "python_full_version < '3.12'" }, { name = "pydantic", marker = "python_full_version < '3.12'" }, - { name = "pydantic-extra-types", extra = ["pycountry"], marker = "python_full_version < '3.12'" }, + { name = "pydantic-extra-types", extra = ["pycountry"], marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "requests", marker = "python_full_version < '3.12'" }, { name = "tiktoken", marker = "python_full_version < '3.12'" }, { name = "typing-extensions", marker = "python_full_version < '3.12'" }, @@ -4050,7 +4058,7 @@ version = "1.6.1" source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "click" }, - { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "colorama", marker = "sys_platform == 'win32' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "ghp-import" }, { name = "jinja2" }, { name = "markdown" }, @@ -4197,7 +4205,7 @@ dependencies = [ { name = "griffe" }, { name = "mkdocs-autorefs" }, { name = "mkdocstrings" }, - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/24/75/d30af27a2906f00eb90143470272376d728521997800f5dce5b340ba35bc/mkdocstrings_python-2.0.1.tar.gz", hash = "sha256:843a562221e6a471fefdd4b45cc6c22d2607ccbad632879234fa9692e9cf7732", size = 199345, upload-time = "2025-12-03T14:26:11.755Z" } wheels = [ @@ -4209,24 +4217,34 @@ name = "mlx" version = "0.30.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "mlx-metal", marker = "sys_platform == 'darwin'" }, + { name = "mlx-metal", marker = "sys_platform == 'darwin' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/cd/8d/16a34feb957ac33525b9b787b5132053a44bc94d1bf40c18639f6e05cd2a/mlx-0.30.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:391c650f0578ce359c8cffddb204b42798b622f9ee2b57b865d87716c00db536", size = 592926, upload-time = "2025-12-18T01:55:28.757Z" }, { url = "https://files.pythonhosted.org/packages/34/e6/0661455f5f4bd9de257874b28a96a33699d36a1e17ccde821341c0ac1c0e/mlx-0.30.1-cp310-cp310-macosx_15_0_arm64.whl", hash = "sha256:42fefcad72d7488c65649e152a1b28f00c2033d38121afa45ce65ae16ec6b988", 
size = 592926, upload-time = "2025-12-18T01:55:30.141Z" }, { url = "https://files.pythonhosted.org/packages/d8/37/a322af7dba9101064b5e858d1208e0e66cd83be7d060d14fa03ace37d52e/mlx-0.30.1-cp310-cp310-macosx_26_0_arm64.whl", hash = "sha256:a9db94e7e080672cc0dda9a5f121aebe0d49f7a8cb46706ecfd8b8ce7d99d541", size = 566952, upload-time = "2025-12-18T00:15:50.075Z" }, + { url = "https://files.pythonhosted.org/packages/c9/46/f0005d07fe5687bbf4efc15b468d27f2923f486b07a625d35c7d3cbb4962/mlx-0.30.1-cp310-cp310-manylinux_2_35_aarch64.whl", hash = "sha256:44b2142896c8dd8ab057dd785faf92fa83f3697b4b6bb01ff7515df12b6de666", size = 658049, upload-time = "2025-12-18T01:55:31.748Z" }, + { url = "https://files.pythonhosted.org/packages/cb/95/cc47c4607cc78f55ce3081ade9161961795c15c049bf219f27a393f85767/mlx-0.30.1-cp310-cp310-manylinux_2_35_x86_64.whl", hash = "sha256:37ea97b3c4bd71b19d87c6ef2c9e681e11f37908d8381fc2b785d2509b0681df", size = 692336, upload-time = "2025-12-18T01:55:33.224Z" }, { url = "https://files.pythonhosted.org/packages/07/14/74acbd677ececd17a44dafda1b472aebacef54f60ff9a41a801f711de9a7/mlx-0.30.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:acfd7d1b8e5b9fa1b7e9fab4cc5ba6a492c559fbb1c5aeab16c1d7a148ab4f1b", size = 593048, upload-time = "2025-12-18T01:55:34.9Z" }, { url = "https://files.pythonhosted.org/packages/58/8c/5309848afb9c53d363f59b88ae5811de248e2817e91aeadf007e2ac8d22b/mlx-0.30.1-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:b62030471272d1835b8137164bd43d863cc93ff1d67ec4f1f87bb4c8613dd5a6", size = 593043, upload-time = "2025-12-18T01:55:36.839Z" }, { url = "https://files.pythonhosted.org/packages/e8/5a/0039815a930f0193e2cffb27c57dc6971004bce0086c2bbbdb10395c272c/mlx-0.30.1-cp311-cp311-macosx_26_0_arm64.whl", hash = "sha256:0489cd340f2d262cb3aaad4368e40e84b152e182e4cea37ba018e56c72e1d020", size = 567014, upload-time = "2025-12-18T00:15:51.731Z" }, + { url = 
"https://files.pythonhosted.org/packages/de/c7/6bdb5497c1f5ed3e33afa7785761ad87fd3436c071805d9a93c905943f04/mlx-0.30.1-cp311-cp311-manylinux_2_35_aarch64.whl", hash = "sha256:fbdcfc3ed556a7e701a8eb67da299e2a25f52615193833ca6374decca3be5bf4", size = 658930, upload-time = "2025-12-18T01:55:38.441Z" }, + { url = "https://files.pythonhosted.org/packages/91/02/2d86a1c116e951eb4d88fe313c321e23628ce7404712e1258cacf925a8b8/mlx-0.30.1-cp311-cp311-manylinux_2_35_x86_64.whl", hash = "sha256:68ec854e7b5f89454e67d6c2fa7bb416b8afb148003ccd775904ec6ec4744818", size = 692484, upload-time = "2025-12-18T01:55:40.254Z" }, { url = "https://files.pythonhosted.org/packages/3a/4b/ad57b2f0ede3f0d009c0e3e1270c219bd18f9025388855ee149680cffa20/mlx-0.30.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:deaef3ecd2f99930867a29de748e3bffa9cc7e4dfa834f2501c37ed29aece1cc", size = 593397, upload-time = "2025-12-18T01:55:41.814Z" }, { url = "https://files.pythonhosted.org/packages/ef/14/7fa03a0f66ac3cfb2fd6752178a1488f13c7233fff26eed0f832d961db35/mlx-0.30.1-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:86ccdcda0b5ea4768b87da25beae5b83ac7cc802506116b6845cea6f450e2377", size = 593397, upload-time = "2025-12-18T01:55:43Z" }, { url = "https://files.pythonhosted.org/packages/9c/c8/9f1343dbe2381f9653df4e0a62dc8bf38f575a2553dc2aa6916de32d2a85/mlx-0.30.1-cp312-cp312-macosx_26_0_arm64.whl", hash = "sha256:a625cb434b2acc5674fe10683374641dab9671fb354ae7c2c67a1fb0405eeb37", size = 567576, upload-time = "2025-12-18T00:15:55.114Z" }, + { url = "https://files.pythonhosted.org/packages/15/ff/485ed9c99c18ef89ac987178c0a526cb4148ba38b14838d315311d9d76a8/mlx-0.30.1-cp312-cp312-manylinux_2_35_aarch64.whl", hash = "sha256:ccc1ff3aca8fb1073c7dcd1274cebe48ae75f852d14b16c7db8228fbbad594dd", size = 643654, upload-time = "2025-12-18T01:55:44.165Z" }, + { url = "https://files.pythonhosted.org/packages/8a/d3/54d3bf5e404c3b6424b49c505dc8b3c06c6bb498fe720195b1fafbd69b5e/mlx-0.30.1-cp312-cp312-manylinux_2_35_x86_64.whl", 
hash = "sha256:55ed7fc4b389d6e49dac6d34a97b41e61cbe3662ac29c3d29cf612e6b2ed9827", size = 687305, upload-time = "2025-12-18T01:55:45.526Z" }, { url = "https://files.pythonhosted.org/packages/f9/fd/c6f56cd87d48763ed63655ace627c06db9819eae7d43d132f40d4965947a/mlx-0.30.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:743520758bc8261b2ed8f3b3dc96e4e9236769dd8f61fb17877c5e44037e2058", size = 593366, upload-time = "2025-12-18T01:55:46.786Z" }, { url = "https://files.pythonhosted.org/packages/dc/53/96d8c48b21f91c4216b6d2ef6dfc10862e5fb0b811a2aaf02c96c78601de/mlx-0.30.1-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:fc9745bc1860ca60128e3a6d36157da06d936e2b4007a4dcba990b40202f598f", size = 593368, upload-time = "2025-12-18T01:55:48.363Z" }, { url = "https://files.pythonhosted.org/packages/70/ce/476c3b7d3a4153bd0e1c5af1f1b6c09a804b652bbed34072404b322c22e0/mlx-0.30.1-cp313-cp313-macosx_26_0_arm64.whl", hash = "sha256:a1480399c67bb327a66c5527b73915132e3fcaae3bce9634e5c81ccad9f43229", size = 567561, upload-time = "2025-12-18T00:15:56.153Z" }, + { url = "https://files.pythonhosted.org/packages/33/41/7ad1e639fd7dd1cf01a62c1c5b051024a859888c27504996e9d8380e6754/mlx-0.30.1-cp313-cp313-manylinux_2_35_aarch64.whl", hash = "sha256:8e19850a4236a8e174f851f5789b8b62a8eb74f5a8fa49ad8ba286c5ddb5f9bf", size = 643122, upload-time = "2025-12-18T01:55:49.607Z" }, + { url = "https://files.pythonhosted.org/packages/d0/dc/72d3737c5b0662eb5e785d353dbc5e34d793d27b09b99e39993ee051bd19/mlx-0.30.1-cp313-cp313-manylinux_2_35_x86_64.whl", hash = "sha256:1c8ed5bcd9f1910fca209e95859ac737e60b3e1954181b820fa269158f81049a", size = 687254, upload-time = "2025-12-18T01:55:51.239Z" }, { url = "https://files.pythonhosted.org/packages/9b/cc/523448996247bb05d9d68e23bccf3dafdda660befb9330f6bd5fa13361e8/mlx-0.30.1-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:d34cc2c25b0ee41c1349f14650db760e282685339858e305453f62405c12bc1b", size = 596006, upload-time = "2025-12-18T01:55:52.463Z" }, { url = 
"https://files.pythonhosted.org/packages/23/0e/f9f2f9659c34c87be8f4167f6a1d6ed7e826f4889d20eecd4c0d8122f0e9/mlx-0.30.1-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:4e47d301e9095b87f0bda8827bfd6ffe744223aba5cee8f28e25894d647f5823", size = 596008, upload-time = "2025-12-18T01:55:54.02Z" }, { url = "https://files.pythonhosted.org/packages/56/a7/49e41fb141de95b6a376091a963c737839c9cda04e423c67f57460a50458/mlx-0.30.1-cp314-cp314-macosx_26_0_arm64.whl", hash = "sha256:cfba13e2a52255d663a1ad62f0f83eb3991e42147edf9a8d38cdd224e48ca49b", size = 570406, upload-time = "2025-12-18T00:15:57.177Z" }, + { url = "https://files.pythonhosted.org/packages/73/99/a43cb112167cf865c069f5e108ae42f5314663930ff3dd86c2d23d984191/mlx-0.30.1-cp314-cp314-manylinux_2_35_aarch64.whl", hash = "sha256:bebfec377208eb29cc88aa86c897c7446aa0984838669e138f273f9225d627ff", size = 646461, upload-time = "2025-12-18T01:55:55.285Z" }, + { url = "https://files.pythonhosted.org/packages/d4/ff/1e1968f107b4221a98dc26832586b1f646b27ddf3e55c95051c09d751f0a/mlx-0.30.1-cp314-cp314-manylinux_2_35_x86_64.whl", hash = "sha256:d18012d5cf0f013bc4a405cfd1e9d2d28e798f4d2dc4f15aa0fbffff73c02ba2", size = 687114, upload-time = "2025-12-18T01:55:56.506Z" }, ] [[package]] @@ -4235,12 +4253,13 @@ version = "0.29.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jinja2" }, - { name = "mlx", marker = "sys_platform == 'darwin'" }, + { name = "mlx", marker = "sys_platform == 'darwin' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "numpy" }, { name = "protobuf" }, { name = "pyyaml" }, { name = "sentencepiece" }, - { name = "transformers" }, + { name = "transformers", version = "4.57.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 
'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "transformers", version = "5.2.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/e3/62/f46e1355256a114808517947f8e83ad6be310c7288c551db0fa678f47923/mlx_lm-0.29.1.tar.gz", hash = "sha256:b99180d8f33d33a077b814e550bfb2d8a59ae003d668fd1f4b3fff62a381d34b", size = 232302, upload-time = "2025-12-16T16:58:27.959Z" } wheels = [ @@ -4439,7 +4458,7 @@ name = "multidict" version = "6.7.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/80/1e/5492c365f222f907de1039b91f922b93fa4f764c713ee858d235495d8f50/multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5", size = 101834, upload-time = "2025-10-06T14:52:30.657Z" } wheels = [ @@ -4600,10 +4619,10 @@ name = "mypy" version = "1.19.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "librt", marker = "platform_python_implementation != 'PyPy'" }, + { name = "librt", marker = "platform_python_implementation != 'PyPy' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "mypy-extensions" }, { name = "pathspec" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "tomli", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name 
= "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/f5/db/4efed9504bc01309ab9c2da7e352cc223569f05478012b5d9ece38fd44d2/mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba", size = 3582404, upload-time = "2025-12-15T05:03:48.42Z" } @@ -4646,7 +4665,7 @@ name = "mypy-boto3-bedrock-runtime" version = "1.42.3" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.12'" }, + { name = "typing-extensions", marker = "python_full_version < '3.12' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/1b/95/cb46d84a7a1408e14cac8a8dbbb24a612e438dd10b5f284fb5e01deece3a/mypy_boto3_bedrock_runtime-1.42.3.tar.gz", hash = "sha256:15686cf925719f14bc0d6c85530808736005fb431f007e37d40e10daff4032cc", size = 29476, upload-time = "2025-12-04T20:56:45.423Z" } wheels = [ @@ -4739,13 +4758,9 @@ wheels = [ name = "numba" version = "0.61.2" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version == '3.11.*'", - "python_full_version < '3.11'", -] dependencies = [ - { name = "llvmlite", version = "0.44.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" }, - { name = "numpy", marker = "python_full_version < '3.12'" }, + { name = "llvmlite" }, + { name = "numpy" }, ] sdist = { url = "https://files.pythonhosted.org/packages/1c/a0/e21f57604304aa03ebb8e098429222722ad99176a4f979d34af1d1ee80da/numba-0.61.2.tar.gz", hash = "sha256:8750ee147940a6637b80ecf7f95062185ad8726c8c28a2295b8ec1160a196f7d", size = 2820615, upload-time = "2025-04-09T02:58:07.659Z" } wheels = [ @@ -4771,42 +4786,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/af/a4/6d3a0f2d3989e62a18749e1e9913d5fa4910bbb3e3311a035baea6caf26d/numba-0.61.2-cp313-cp313-win_amd64.whl", hash = "sha256:59321215e2e0ac5fa928a8020ab00b8e57cda8a97384963ac0dfa4d4e6aa54e7", size = 2831846, upload-time = "2025-04-09T02:58:06.125Z" }, ] -[[package]] -name = "numba" -version = "0.63.1" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.13'", - "python_full_version == '3.12.*'", -] -dependencies = [ - { name = "llvmlite", version = "0.46.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" }, - { name = "numpy", marker = "python_full_version >= '3.12'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/dc/60/0145d479b2209bd8fdae5f44201eceb8ce5a23e0ed54c71f57db24618665/numba-0.63.1.tar.gz", hash = "sha256:b320aa675d0e3b17b40364935ea52a7b1c670c9037c39cf92c49502a75902f4b", size = 2761666, upload-time = "2025-12-10T02:57:39.002Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5e/ce/5283d4ffa568f795bb0fd61ee1f0efc0c6094b94209259167fc8d4276bde/numba-0.63.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6d6bf5bf00f7db629305caaec82a2ffb8abe2bf45eaad0d0738dc7de4113779", size = 2680810, upload-time = "2025-12-10T02:56:55.269Z" }, - { url = "https://files.pythonhosted.org/packages/0f/72/a8bda517e26d912633b32626333339b7c769ea73a5c688365ea5f88fd07e/numba-0.63.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:08653d0dfc9cc9c4c9a8fba29ceb1f2d5340c3b86c4a7e5e07e42b643bc6a2f4", size = 3739735, upload-time = "2025-12-10T02:56:57.922Z" }, - { url = "https://files.pythonhosted.org/packages/ca/17/1913b7c1173b2db30fb7a9696892a7c4c59aeee777a9af6859e9e01bac51/numba-0.63.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f09eebf5650246ce2a4e9a8d38270e2d4b0b0ae978103bafb38ed7adc5ea906e", size = 3446707, upload-time = "2025-12-10T02:56:59.837Z" }, - { 
url = "https://files.pythonhosted.org/packages/b4/77/703db56c3061e9fdad5e79c91452947fdeb2ec0bdfe4affe9b144e7025e0/numba-0.63.1-cp310-cp310-win_amd64.whl", hash = "sha256:f8bba17421d865d8c0f7be2142754ebce53e009daba41c44cf6909207d1a8d7d", size = 2747374, upload-time = "2025-12-10T02:57:07.908Z" }, - { url = "https://files.pythonhosted.org/packages/70/90/5f8614c165d2e256fbc6c57028519db6f32e4982475a372bbe550ea0454c/numba-0.63.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b33db00f18ccc790ee9911ce03fcdfe9d5124637d1ecc266f5ae0df06e02fec3", size = 2680501, upload-time = "2025-12-10T02:57:09.797Z" }, - { url = "https://files.pythonhosted.org/packages/dc/9d/d0afc4cf915edd8eadd9b2ab5b696242886ee4f97720d9322650d66a88c6/numba-0.63.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7d31ea186a78a7c0f6b1b2a3fe68057fdb291b045c52d86232b5383b6cf4fc25", size = 3744945, upload-time = "2025-12-10T02:57:11.697Z" }, - { url = "https://files.pythonhosted.org/packages/05/a9/d82f38f2ab73f3be6f838a826b545b80339762ee8969c16a8bf1d39395a8/numba-0.63.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ed3bb2fbdb651d6aac394388130a7001aab6f4541837123a4b4ab8b02716530c", size = 3450827, upload-time = "2025-12-10T02:57:13.709Z" }, - { url = "https://files.pythonhosted.org/packages/18/3f/a9b106e93c5bd7434e65f044bae0d204e20aa7f7f85d72ceb872c7c04216/numba-0.63.1-cp311-cp311-win_amd64.whl", hash = "sha256:1ecbff7688f044b1601be70113e2fb1835367ee0b28ffa8f3adf3a05418c5c87", size = 2747262, upload-time = "2025-12-10T02:57:15.664Z" }, - { url = "https://files.pythonhosted.org/packages/14/9c/c0974cd3d00ff70d30e8ff90522ba5fbb2bcee168a867d2321d8d0457676/numba-0.63.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2819cd52afa5d8d04e057bdfd54367575105f8829350d8fb5e4066fb7591cc71", size = 2680981, upload-time = "2025-12-10T02:57:17.579Z" }, - { url = 
"https://files.pythonhosted.org/packages/cb/70/ea2bc45205f206b7a24ee68a159f5097c9ca7e6466806e7c213587e0c2b1/numba-0.63.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5cfd45dbd3d409e713b1ccfdc2ee72ca82006860254429f4ef01867fdba5845f", size = 3801656, upload-time = "2025-12-10T02:57:19.106Z" }, - { url = "https://files.pythonhosted.org/packages/0d/82/4f4ba4fd0f99825cbf3cdefd682ca3678be1702b63362011de6e5f71f831/numba-0.63.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:69a599df6976c03b7ecf15d05302696f79f7e6d10d620367407517943355bcb0", size = 3501857, upload-time = "2025-12-10T02:57:20.721Z" }, - { url = "https://files.pythonhosted.org/packages/af/fd/6540456efa90b5f6604a86ff50dabefb187e43557e9081adcad3be44f048/numba-0.63.1-cp312-cp312-win_amd64.whl", hash = "sha256:bbad8c63e4fc7eb3cdb2c2da52178e180419f7969f9a685f283b313a70b92af3", size = 2750282, upload-time = "2025-12-10T02:57:22.474Z" }, - { url = "https://files.pythonhosted.org/packages/57/f7/e19e6eff445bec52dde5bed1ebb162925a8e6f988164f1ae4b3475a73680/numba-0.63.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:0bd4fd820ef7442dcc07da184c3f54bb41d2bdb7b35bacf3448e73d081f730dc", size = 2680954, upload-time = "2025-12-10T02:57:24.145Z" }, - { url = "https://files.pythonhosted.org/packages/e9/6c/1e222edba1e20e6b113912caa9b1665b5809433cbcb042dfd133c6f1fd38/numba-0.63.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:53de693abe4be3bd4dee38e1c55f01c55ff644a6a3696a3670589e6e4c39cde2", size = 3809736, upload-time = "2025-12-10T02:57:25.836Z" }, - { url = "https://files.pythonhosted.org/packages/76/0a/590bad11a8b3feeac30a24d01198d46bdb76ad15c70d3a530691ce3cae58/numba-0.63.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:81227821a72a763c3d4ac290abbb4371d855b59fdf85d5af22a47c0e86bf8c7e", size = 3508854, upload-time = "2025-12-10T02:57:27.438Z" }, - { url = 
"https://files.pythonhosted.org/packages/4e/f5/3800384a24eed1e4d524669cdbc0b9b8a628800bb1e90d7bd676e5f22581/numba-0.63.1-cp313-cp313-win_amd64.whl", hash = "sha256:eb227b07c2ac37b09432a9bda5142047a2d1055646e089d4a240a2643e508102", size = 2750228, upload-time = "2025-12-10T02:57:30.36Z" }, - { url = "https://files.pythonhosted.org/packages/36/2f/53be2aa8a55ee2608ebe1231789cbb217f6ece7f5e1c685d2f0752e95a5b/numba-0.63.1-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:f180883e5508940cc83de8a8bea37fc6dd20fbe4e5558d4659b8b9bef5ff4731", size = 2681153, upload-time = "2025-12-10T02:57:32.016Z" }, - { url = "https://files.pythonhosted.org/packages/13/91/53e59c86759a0648282368d42ba732c29524a745fd555ed1fb1df83febbe/numba-0.63.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0938764afa82a47c0e895637a6c55547a42c9e1d35cac42285b1fa60a8b02bb", size = 3778718, upload-time = "2025-12-10T02:57:33.764Z" }, - { url = "https://files.pythonhosted.org/packages/6c/0c/2be19eba50b0b7636f6d1f69dfb2825530537708a234ba1ff34afc640138/numba-0.63.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f90a929fa5094e062d4e0368ede1f4497d5e40f800e80aa5222c4734236a2894", size = 3478712, upload-time = "2025-12-10T02:57:35.518Z" }, - { url = "https://files.pythonhosted.org/packages/0d/5f/4d0c9e756732577a52211f31da13a3d943d185f7fb90723f56d79c696caa/numba-0.63.1-cp314-cp314-win_amd64.whl", hash = "sha256:8d6d5ce85f572ed4e1a135dbb8c0114538f9dd0e3657eeb0bb64ab204cbe2a8f", size = 2752161, upload-time = "2025-12-10T02:57:37.12Z" }, -] - [[package]] name = "numpy" version = "2.2.6" @@ -4874,7 +4853,9 @@ name = "nvidia-cublas-cu12" version = "12.8.4.1" source = { registry = "https://pypi.org/simple" } wheels = [ + { url = "https://files.pythonhosted.org/packages/29/99/db44d685f0e257ff0e213ade1964fc459b4a690a73293220e98feb3307cf/nvidia_cublas_cu12-12.8.4.1-py3-none-manylinux_2_27_aarch64.whl", hash = 
"sha256:b86f6dd8935884615a0683b663891d43781b819ac4f2ba2b0c9604676af346d0", size = 590537124, upload-time = "2025-03-07T01:43:53.556Z" }, { url = "https://files.pythonhosted.org/packages/dc/61/e24b560ab2e2eaeb3c839129175fb330dfcfc29e5203196e5541a4c44682/nvidia_cublas_cu12-12.8.4.1-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:8ac4e771d5a348c551b2a426eda6193c19aa630236b418086020df5ba9667142", size = 594346921, upload-time = "2025-03-07T01:44:31.254Z" }, + { url = "https://files.pythonhosted.org/packages/70/61/7d7b3c70186fb651d0fbd35b01dbfc8e755f69fd58f817f3d0f642df20c3/nvidia_cublas_cu12-12.8.4.1-py3-none-win_amd64.whl", hash = "sha256:47e9b82132fa8d2b4944e708049229601448aaad7e6f296f630f2d1a32de35af", size = 567544208, upload-time = "2025-03-07T01:53:30.535Z" }, ] [[package]] @@ -4882,7 +4863,9 @@ name = "nvidia-cuda-cupti-cu12" version = "12.8.90" source = { registry = "https://pypi.org/simple" } wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/1f/b3bd73445e5cb342727fd24fe1f7b748f690b460acadc27ea22f904502c8/nvidia_cuda_cupti_cu12-12.8.90-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4412396548808ddfed3f17a467b104ba7751e6b58678a4b840675c56d21cf7ed", size = 9533318, upload-time = "2025-03-07T01:40:10.421Z" }, { url = "https://files.pythonhosted.org/packages/f8/02/2adcaa145158bf1a8295d83591d22e4103dbfd821bcaf6f3f53151ca4ffa/nvidia_cuda_cupti_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ea0cb07ebda26bb9b29ba82cda34849e73c166c18162d3913575b0c9db9a6182", size = 10248621, upload-time = "2025-03-07T01:40:21.213Z" }, + { url = "https://files.pythonhosted.org/packages/41/bc/83f5426095d93694ae39fe1311431b5d5a9bb82e48bf0dd8e19be2765942/nvidia_cuda_cupti_cu12-12.8.90-py3-none-win_amd64.whl", hash = "sha256:bb479dcdf7e6d4f8b0b01b115260399bf34154a1a2e9fe11c85c517d87efd98e", size = 7015759, upload-time = "2025-03-07T01:51:11.355Z" }, ] [[package]] @@ -4891,6 +4874,8 @@ version = "12.8.93" 
source = { registry = "https://pypi.org/simple" } wheels = [ { url = "https://files.pythonhosted.org/packages/05/6b/32f747947df2da6994e999492ab306a903659555dddc0fbdeb9d71f75e52/nvidia_cuda_nvrtc_cu12-12.8.93-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:a7756528852ef889772a84c6cd89d41dfa74667e24cca16bb31f8f061e3e9994", size = 88040029, upload-time = "2025-03-07T01:42:13.562Z" }, + { url = "https://files.pythonhosted.org/packages/eb/d1/e50d0acaab360482034b84b6e27ee83c6738f7d32182b987f9c7a4e32962/nvidia_cuda_nvrtc_cu12-12.8.93-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fc1fec1e1637854b4c0a65fb9a8346b51dd9ee69e61ebaccc82058441f15bce8", size = 43106076, upload-time = "2025-03-07T01:41:59.817Z" }, + { url = "https://files.pythonhosted.org/packages/45/51/52a3d84baa2136cc8df15500ad731d74d3a1114d4c123e043cb608d4a32b/nvidia_cuda_nvrtc_cu12-12.8.93-py3-none-win_amd64.whl", hash = "sha256:7a4b6b2904850fe78e0bd179c4b655c404d4bb799ef03ddc60804247099ae909", size = 73586838, upload-time = "2025-03-07T01:52:13.483Z" }, ] [[package]] @@ -4898,7 +4883,9 @@ name = "nvidia-cuda-runtime-cu12" version = "12.8.90" source = { registry = "https://pypi.org/simple" } wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/75/f865a3b236e4647605ea34cc450900854ba123834a5f1598e160b9530c3a/nvidia_cuda_runtime_cu12-12.8.90-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:52bf7bbee900262ffefe5e9d5a2a69a30d97e2bc5bb6cc866688caa976966e3d", size = 965265, upload-time = "2025-03-07T01:39:43.533Z" }, { url = "https://files.pythonhosted.org/packages/0d/9b/a997b638fcd068ad6e4d53b8551a7d30fe8b404d6f1804abf1df69838932/nvidia_cuda_runtime_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:adade8dcbd0edf427b7204d480d6066d33902cab2a4707dcfc48a2d0fd44ab90", size = 954765, upload-time = "2025-03-07T01:40:01.615Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/a5/a515b7600ad361ea14bfa13fb4d6687abf500adc270f19e89849c0590492/nvidia_cuda_runtime_cu12-12.8.90-py3-none-win_amd64.whl", hash = "sha256:c0c6027f01505bfed6c3b21ec546f69c687689aad5f1a377554bc6ca4aa993a8", size = 944318, upload-time = "2025-03-07T01:51:01.794Z" }, ] [[package]] @@ -4909,7 +4896,9 @@ dependencies = [ { name = "nvidia-cublas-cu12" }, ] wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/41/e79269ce215c857c935fd86bcfe91a451a584dfc27f1e068f568b9ad1ab7/nvidia_cudnn_cu12-9.10.2.21-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:c9132cc3f8958447b4910a1720036d9eff5928cc3179b0a51fb6d167c6cc87d8", size = 705026878, upload-time = "2025-06-06T21:52:51.348Z" }, { url = "https://files.pythonhosted.org/packages/ba/51/e123d997aa098c61d029f76663dedbfb9bc8dcf8c60cbd6adbe42f76d049/nvidia_cudnn_cu12-9.10.2.21-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:949452be657fa16687d0930933f032835951ef0892b37d2d53824d1a84dc97a8", size = 706758467, upload-time = "2025-06-06T21:54:08.597Z" }, + { url = "https://files.pythonhosted.org/packages/3d/90/0bd6e586701b3a890fd38aa71c387dab4883d619d6e5ad912ccbd05bfd67/nvidia_cudnn_cu12-9.10.2.21-py3-none-win_amd64.whl", hash = "sha256:c6288de7d63e6cf62988f0923f96dc339cea362decb1bf5b3141883392a7d65e", size = 692992268, upload-time = "2025-06-06T21:55:18.114Z" }, ] [[package]] @@ -4942,7 +4931,9 @@ dependencies = [ { name = "nvidia-nvjitlink-cu12" }, ] wheels = [ + { url = "https://files.pythonhosted.org/packages/60/bc/7771846d3a0272026c416fbb7e5f4c1f146d6d80704534d0b187dd6f4800/nvidia_cufft_cu12-11.3.3.83-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:848ef7224d6305cdb2a4df928759dca7b1201874787083b6e7550dd6765ce69a", size = 193109211, upload-time = "2025-03-07T01:44:56.873Z" }, { url = 
"https://files.pythonhosted.org/packages/1f/13/ee4e00f30e676b66ae65b4f08cb5bcbb8392c03f54f2d5413ea99a5d1c80/nvidia_cufft_cu12-11.3.3.83-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d2dd21ec0b88cf61b62e6b43564355e5222e4a3fb394cac0db101f2dd0d4f74", size = 193118695, upload-time = "2025-03-07T01:45:27.821Z" }, + { url = "https://files.pythonhosted.org/packages/7d/ec/ce1629f1e478bb5ccd208986b5f9e0316a78538dd6ab1d0484f012f8e2a1/nvidia_cufft_cu12-11.3.3.83-py3-none-win_amd64.whl", hash = "sha256:7a64a98ef2a7c47f905aaf8931b69a3a43f27c55530c698bb2ed7c75c0b42cb7", size = 192216559, upload-time = "2025-03-07T01:53:57.106Z" }, ] [[package]] @@ -4951,6 +4942,7 @@ version = "1.13.1.3" source = { registry = "https://pypi.org/simple" } wheels = [ { url = "https://files.pythonhosted.org/packages/bb/fe/1bcba1dfbfb8d01be8d93f07bfc502c93fa23afa6fd5ab3fc7c1df71038a/nvidia_cufile_cu12-1.13.1.3-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1d069003be650e131b21c932ec3d8969c1715379251f8d23a1860554b1cb24fc", size = 1197834, upload-time = "2025-03-07T01:45:50.723Z" }, + { url = "https://files.pythonhosted.org/packages/1e/f5/5607710447a6fe9fd9b3283956fceeee8a06cda1d2f56ce31371f595db2a/nvidia_cufile_cu12-1.13.1.3-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:4beb6d4cce47c1a0f1013d72e02b0994730359e17801d395bdcbf20cfb3bb00a", size = 1120705, upload-time = "2025-03-07T01:45:41.434Z" }, ] [[package]] @@ -4958,7 +4950,9 @@ name = "nvidia-curand-cu12" version = "10.3.9.90" source = { registry = "https://pypi.org/simple" } wheels = [ + { url = "https://files.pythonhosted.org/packages/45/5e/92aa15eca622a388b80fbf8375d4760738df6285b1e92c43d37390a33a9a/nvidia_curand_cu12-10.3.9.90-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:dfab99248034673b779bc6decafdc3404a8a6f502462201f2f31f11354204acd", size = 63625754, upload-time = "2025-03-07T01:46:10.735Z" }, { url = 
"https://files.pythonhosted.org/packages/fb/aa/6584b56dc84ebe9cf93226a5cde4d99080c8e90ab40f0c27bda7a0f29aa1/nvidia_curand_cu12-10.3.9.90-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:b32331d4f4df5d6eefa0554c565b626c7216f87a06a4f56fab27c3b68a830ec9", size = 63619976, upload-time = "2025-03-07T01:46:23.323Z" }, + { url = "https://files.pythonhosted.org/packages/b9/75/70c05b2f3ed5be3bb30b7102b6eb78e100da4bbf6944fd6725c012831cab/nvidia_curand_cu12-10.3.9.90-py3-none-win_amd64.whl", hash = "sha256:f149a8ca457277da854f89cf282d6ef43176861926c7ac85b2a0fbd237c587ec", size = 62765309, upload-time = "2025-03-07T01:54:20.478Z" }, ] [[package]] @@ -4971,7 +4965,9 @@ dependencies = [ { name = "nvidia-nvjitlink-cu12" }, ] wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/32/f7cd6ce8a7690544d084ea21c26e910a97e077c9b7f07bf5de623ee19981/nvidia_cusolver_cu12-11.7.3.90-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:db9ed69dbef9715071232caa9b69c52ac7de3a95773c2db65bdba85916e4e5c0", size = 267229841, upload-time = "2025-03-07T01:46:54.356Z" }, { url = "https://files.pythonhosted.org/packages/85/48/9a13d2975803e8cf2777d5ed57b87a0b6ca2cc795f9a4f59796a910bfb80/nvidia_cusolver_cu12-11.7.3.90-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:4376c11ad263152bd50ea295c05370360776f8c3427b30991df774f9fb26c450", size = 267506905, upload-time = "2025-03-07T01:47:16.273Z" }, + { url = "https://files.pythonhosted.org/packages/13/c0/76ca8551b8a84146ffa189fec81c26d04adba4bc0dbe09cd6e6fd9b7de04/nvidia_cusolver_cu12-11.7.3.90-py3-none-win_amd64.whl", hash = "sha256:4a550db115fcabc4d495eb7d39ac8b58d4ab5d8e63274d3754df1c0ad6a22d34", size = 256720438, upload-time = "2025-03-07T01:54:39.898Z" }, ] [[package]] @@ -4982,7 +4978,9 @@ dependencies = [ { name = "nvidia-nvjitlink-cu12" }, ] wheels = [ + { url = 
"https://files.pythonhosted.org/packages/bc/f7/cd777c4109681367721b00a106f491e0d0d15cfa1fd59672ce580ce42a97/nvidia_cusparse_cu12-12.5.8.93-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b6c161cb130be1a07a27ea6923df8141f3c295852f4b260c65f18f3e0a091dc", size = 288117129, upload-time = "2025-03-07T01:47:40.407Z" }, { url = "https://files.pythonhosted.org/packages/c2/f5/e1854cb2f2bcd4280c44736c93550cc300ff4b8c95ebe370d0aa7d2b473d/nvidia_cusparse_cu12-12.5.8.93-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1ec05d76bbbd8b61b06a80e1eaf8cf4959c3d4ce8e711b65ebd0443bb0ebb13b", size = 288216466, upload-time = "2025-03-07T01:48:13.779Z" }, + { url = "https://files.pythonhosted.org/packages/62/07/f3b2ad63f8e3d257a599f422ae34eb565e70c41031aecefa3d18b62cabd1/nvidia_cusparse_cu12-12.5.8.93-py3-none-win_amd64.whl", hash = "sha256:9a33604331cb2cac199f2e7f5104dfbb8a5a898c367a53dfda9ff2acb6b6b4dd", size = 284937404, upload-time = "2025-03-07T01:55:07.742Z" }, ] [[package]] @@ -4990,7 +4988,9 @@ name = "nvidia-cusparselt-cu12" version = "0.7.1" source = { registry = "https://pypi.org/simple" } wheels = [ + { url = "https://files.pythonhosted.org/packages/73/b9/598f6ff36faaece4b3c50d26f50e38661499ff34346f00e057760b35cc9d/nvidia_cusparselt_cu12-0.7.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8878dce784d0fac90131b6817b607e803c36e629ba34dc5b433471382196b6a5", size = 283835557, upload-time = "2025-02-26T00:16:54.265Z" }, { url = "https://files.pythonhosted.org/packages/56/79/12978b96bd44274fe38b5dde5cfb660b1d114f70a65ef962bcbbed99b549/nvidia_cusparselt_cu12-0.7.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f1bb701d6b930d5a7cea44c19ceb973311500847f81b634d802b7b539dc55623", size = 287193691, upload-time = "2025-02-26T00:15:44.104Z" }, + { url = "https://files.pythonhosted.org/packages/2f/d8/a6b0d0d0c2435e9310f3e2bb0d9c9dd4c33daef86aa5f30b3681defd37ea/nvidia_cusparselt_cu12-0.7.1-py3-none-win_amd64.whl", hash = 
"sha256:f67fbb5831940ec829c9117b7f33807db9f9678dc2a617fbe781cac17b4e1075", size = 271020911, upload-time = "2025-02-26T00:14:47.204Z" }, ] [[package]] @@ -5038,6 +5038,7 @@ name = "nvidia-nccl-cu12" version = "2.27.5" source = { registry = "https://pypi.org/simple" } wheels = [ + { url = "https://files.pythonhosted.org/packages/bb/1c/857979db0ef194ca5e21478a0612bcdbbe59458d7694361882279947b349/nvidia_nccl_cu12-2.27.5-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:31432ad4d1fb1004eb0c56203dc9bc2178a1ba69d1d9e02d64a6938ab5e40e7a", size = 322400625, upload-time = "2025-06-26T04:11:04.496Z" }, { url = "https://files.pythonhosted.org/packages/6e/89/f7a07dc961b60645dbbf42e80f2bc85ade7feb9a491b11a1e973aa00071f/nvidia_nccl_cu12-2.27.5-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ad730cf15cb5d25fe849c6e6ca9eb5b76db16a80f13f425ac68d8e2e55624457", size = 322348229, upload-time = "2025-06-26T04:11:28.385Z" }, ] @@ -5047,6 +5048,8 @@ version = "12.8.93" source = { registry = "https://pypi.org/simple" } wheels = [ { url = "https://files.pythonhosted.org/packages/f6/74/86a07f1d0f42998ca31312f998bd3b9a7eff7f52378f4f270c8679c77fb9/nvidia_nvjitlink_cu12-12.8.93-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:81ff63371a7ebd6e6451970684f916be2eab07321b73c9d244dc2b4da7f73b88", size = 39254836, upload-time = "2025-03-07T01:49:55.661Z" }, + { url = "https://files.pythonhosted.org/packages/2a/a2/8cee5da30d13430e87bf99bb33455d2724d0a4a9cb5d7926d80ccb96d008/nvidia_nvjitlink_cu12-12.8.93-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:adccd7161ace7261e01bb91e44e88da350895c270d23f744f0820c818b7229e7", size = 38386204, upload-time = "2025-03-07T01:49:43.612Z" }, + { url = "https://files.pythonhosted.org/packages/ed/d7/34f02dad2e30c31b10a51f6b04e025e5dd60e5f936af9045a9b858a05383/nvidia_nvjitlink_cu12-12.8.93-py3-none-win_amd64.whl", hash = 
"sha256:bd93fbeeee850917903583587f4fc3a4eafa022e34572251368238ab5e6bd67f", size = 268553710, upload-time = "2025-03-07T01:56:24.13Z" }, ] [[package]] @@ -5054,6 +5057,7 @@ name = "nvidia-nvshmem-cu12" version = "3.3.20" source = { registry = "https://pypi.org/simple" } wheels = [ + { url = "https://files.pythonhosted.org/packages/92/9d/3dd98852568fb845ec1f7902c90a22b240fe1cbabda411ccedf2fd737b7b/nvidia_nvshmem_cu12-3.3.20-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0b0b960da3842212758e4fa4696b94f129090b30e5122fea3c5345916545cff0", size = 124484616, upload-time = "2025-08-04T20:24:59.172Z" }, { url = "https://files.pythonhosted.org/packages/3b/6c/99acb2f9eb85c29fc6f3a7ac4dccfd992e22666dd08a642b303311326a97/nvidia_nvshmem_cu12-3.3.20-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d00f26d3f9b2e3c3065be895e3059d6479ea5c638a3f38c9fec49b1b9dd7c1e5", size = 124657145, upload-time = "2025-08-04T20:25:19.995Z" }, ] @@ -5062,7 +5066,9 @@ name = "nvidia-nvtx-cu12" version = "12.8.90" source = { registry = "https://pypi.org/simple" } wheels = [ + { url = "https://files.pythonhosted.org/packages/10/c0/1b303feea90d296f6176f32a2a70b5ef230f9bdeb3a72bddb0dc922dc137/nvidia_nvtx_cu12-12.8.90-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d7ad891da111ebafbf7e015d34879f7112832fc239ff0d7d776b6cb685274615", size = 91161, upload-time = "2025-03-07T01:42:23.922Z" }, { url = "https://files.pythonhosted.org/packages/a2/eb/86626c1bbc2edb86323022371c39aa48df6fd8b0a1647bc274577f72e90b/nvidia_nvtx_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b17e2001cc0d751a5bc2c6ec6d26ad95913324a4adb86788c944f8ce9ba441f", size = 89954, upload-time = "2025-03-07T01:42:44.131Z" }, + { url = "https://files.pythonhosted.org/packages/9f/99/4c9c0c329bf9fc125008c3b54c7c94c0023518d06fc025ae36431375e1fe/nvidia_nvtx_cu12-12.8.90-py3-none-win_amd64.whl", hash = 
"sha256:619c8304aedc69f02ea82dd244541a83c3d9d40993381b3b590f1adaed3db41e", size = 56492, upload-time = "2025-03-07T01:52:24.69Z" }, ] [[package]] @@ -5461,10 +5467,10 @@ wheels = [ [package.optional-dependencies] llamacpp = [ - { name = "huggingface-hub" }, + { name = "huggingface-hub", version = "0.36.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "huggingface-hub", version = "1.4.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, { name = "llama-cpp-python" }, - { name = "numba", version = "0.61.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" }, - { name = "numba", version = "0.63.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" }, + { name = "numba" }, ] mlxlm = [ { name = "datasets" }, @@ -5478,7 +5484,8 @@ transformers = [ { name = "accelerate" }, { name = "datasets" }, { name = "setuptools" }, - { name = "transformers" }, + { name = "transformers", version = "4.57.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "transformers", version = "5.2.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, ] [[package]] @@ -5641,8 +5648,8 @@ name = "pendulum" version = "3.1.0" source = 
{ registry = "https://pypi.org/simple" } dependencies = [ - { name = "python-dateutil", marker = "python_full_version < '3.13'" }, - { name = "tzdata", marker = "python_full_version < '3.13'" }, + { name = "python-dateutil", marker = "python_full_version < '3.13' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "tzdata", marker = "python_full_version < '3.13' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/23/7c/009c12b86c7cc6c403aec80f8a4308598dfc5995e5c523a5491faaa3952e/pendulum-3.1.0.tar.gz", hash = "sha256:66f96303560f41d097bee7d2dc98ffca716fbb3a832c4b3062034c2d45865015", size = 85930, upload-time = "2025-04-19T14:30:01.675Z" } wheels = [ @@ -5863,7 +5870,7 @@ dependencies = [ { name = "orjson" }, { name = "packaging" }, { name = "pathspec" }, - { name = "pendulum", marker = "python_full_version < '3.13'" }, + { name = "pendulum", marker = "python_full_version < '3.13' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "pluggy" }, { name = "prometheus-client" }, { name = "pydantic" }, @@ -5879,7 +5886,7 @@ dependencies = [ { name = "rfc3339-validator" }, { name = "rich" }, { name = "ruamel-yaml" }, - { name = "ruamel-yaml-clib", marker = "platform_python_implementation == 'CPython'" }, + { name = "ruamel-yaml-clib", marker = "platform_python_implementation == 'CPython' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "semver" }, { name = "sniffio" }, { name = "sqlalchemy", extra = ["asyncio"] }, @@ -5888,7 +5895,7 @@ dependencies = [ { name = "typing-extensions" }, { name = "uvicorn" }, { name = "websockets" }, - { name = "whenever", marker = "python_full_version >= '3.13'" }, + { name = 
"whenever", marker = "python_full_version >= '3.13' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/5b/46/139cfabbc729f13d4b6df74b56c01ddcbb1118936b312b2ef82e9826d8bc/prefect-3.6.13.tar.gz", hash = "sha256:ee0b39fa390c204ccb3762be00a729edd45c5aa54e0245951f8682f92bfb016b", size = 10811400, upload-time = "2026-01-23T04:17:29.594Z" } wheels = [ @@ -6105,8 +6112,8 @@ name = "psycopg" version = "3.3.2" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, - { name = "tzdata", marker = "sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "python_full_version < '3.13' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "tzdata", marker = "sys_platform == 'win32' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/e0/1a/7d9ef4fdc13ef7f15b934c393edc97a35c281bb7d3c3329fbfcbe915a7c2/psycopg-3.3.2.tar.gz", hash = "sha256:707a67975ee214d200511177a6a80e56e654754c9afca06a7194ea6bbfde9ca7", size = 165630, upload-time = "2025-12-06T17:34:53.899Z" } wheels = [ @@ -6115,7 +6122,7 @@ wheels = [ [package.optional-dependencies] binary = [ - { name = "psycopg-binary", marker = "implementation_name != 'pypy'" }, + { name = "psycopg-binary", marker = "implementation_name != 'pypy' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] [[package]] @@ -6523,7 +6530,8 @@ email = [ name = "pydantic-ai" source = { editable = "." 
} dependencies = [ - { name = "pydantic-ai-slim", extra = ["ag-ui", "anthropic", "bedrock", "cli", "cohere", "evals", "fastmcp", "google", "groq", "huggingface", "logfire", "mcp", "mistral", "openai", "retries", "temporal", "ui", "vertexai", "xai"] }, + { name = "pydantic-ai-slim", extra = ["ag-ui", "anthropic", "bedrock", "cli", "cohere", "evals", "fastmcp", "google", "groq", "logfire", "mcp", "mistral", "openai", "retries", "temporal", "ui", "vertexai", "xai"] }, + { name = "pydantic-ai-slim", extra = ["huggingface"], marker = "extra == 'extra-16-pydantic-ai-slim-huggingface'" }, ] [package.optional-dependencies] @@ -6540,7 +6548,7 @@ outlines-llamacpp = [ { name = "pydantic-ai-slim", extra = ["outlines-llamacpp"] }, ] outlines-mlxlm = [ - { name = "pydantic-ai-slim", extra = ["outlines-mlxlm"], marker = "platform_machine == 'arm64' and sys_platform == 'darwin'" }, + { name = "pydantic-ai-slim", extra = ["outlines-mlxlm"], marker = "(platform_machine == 'arm64' and sys_platform == 'darwin') or (platform_machine != 'arm64' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (sys_platform != 'darwin' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] outlines-sglang = [ { name = "pydantic-ai-slim", extra = ["outlines-sglang"] }, @@ -6549,7 +6557,7 @@ outlines-transformers = [ { name = "pydantic-ai-slim", extra = ["outlines-transformers"] }, ] outlines-vllm-offline = [ - { name = "pydantic-ai-slim", extra = ["outlines-vllm-offline"] }, + { name = "pydantic-ai-slim", extra = ["outlines-vllm-offline"], marker = "extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, ] prefect = [ { name = "pydantic-ai-slim", extra = ["prefect"] }, @@ -6712,7 +6720,7 @@ requires-dist = [ name = "pydantic-ai-slim" source = { editable = "pydantic_ai_slim" } dependencies = [ - { name = "exceptiongroup", marker = "python_full_version < '3.11'" 
}, + { name = "exceptiongroup", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "genai-prices" }, { name = "griffelib" }, { name = "httpx" }, @@ -6743,11 +6751,14 @@ cli = [ { name = "rich" }, ] cohere = [ - { name = "cohere", marker = "sys_platform != 'emscripten'" }, + { name = "cohere", marker = "sys_platform != 'emscripten' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] dbos = [ { name = "dbos" }, ] +docker-environment = [ + { name = "docker" }, +] duckduckgo = [ { name = "ddgs" }, ] @@ -6767,7 +6778,7 @@ groq = [ { name = "groq" }, ] huggingface = [ - { name = "huggingface-hub" }, + { name = "huggingface-hub", version = "1.4.1", source = { registry = "https://pypi.org/simple" } }, ] logfire = [ { name = "logfire", extra = ["httpx"] }, @@ -6786,20 +6797,21 @@ openrouter = [ { name = "openai" }, ] outlines-llamacpp = [ - { name = "outlines", extra = ["llamacpp"], marker = "python_full_version < '3.14'" }, + { name = "outlines", extra = ["llamacpp"], marker = "python_full_version < '3.14' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] outlines-mlxlm = [ - { name = "outlines", extra = ["mlxlm"], marker = "python_full_version < '3.14' and platform_machine == 'arm64' and sys_platform == 'darwin'" }, + { name = "outlines", extra = ["mlxlm"], marker = "(python_full_version < '3.14' and platform_machine == 'arm64' and sys_platform == 'darwin') or (python_full_version >= '3.14' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (platform_machine != 'arm64' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (sys_platform != 'darwin' and extra == 
'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] outlines-sglang = [ - { name = "outlines", extra = ["sglang"], marker = "python_full_version < '3.14'" }, - { name = "pillow", marker = "python_full_version < '3.14'" }, + { name = "outlines", extra = ["sglang"], marker = "python_full_version < '3.14' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "pillow", marker = "python_full_version < '3.14' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] outlines-transformers = [ - { name = "outlines", extra = ["transformers"], marker = "(python_full_version < '3.14' and platform_machine != 'x86_64') or (python_full_version < '3.14' and sys_platform != 'darwin')" }, - { name = "pillow", marker = "python_full_version < '3.14'" }, - { name = "torch", marker = "(python_full_version < '3.14' and platform_machine != 'x86_64') or (python_full_version < '3.14' and sys_platform != 'darwin')" }, - { name = "transformers", marker = "python_full_version < '3.14'" }, + { name = "outlines", extra = ["transformers"], marker = "(python_full_version < '3.14' and platform_machine != 'x86_64') or (python_full_version < '3.14' and sys_platform != 'darwin') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "pillow", marker = "python_full_version < '3.14' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "torch", marker = "(python_full_version < '3.14' and platform_machine != 'x86_64') or (python_full_version < '3.14' and sys_platform != 'darwin') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "transformers", version = "4.57.6", source = 
{ registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "transformers", version = "5.2.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.12' and python_full_version < '3.14') or (python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-huggingface') or (python_full_version < '3.12' and extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (python_full_version >= '3.14' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] outlines-vllm-offline = [ { name = "outlines", marker = "python_full_version < '3.14'" }, @@ -6813,7 +6825,7 @@ retries = [ { name = "tenacity" }, ] sentence-transformers = [ - { name = "sentence-transformers", marker = "python_full_version < '3.14'" }, + { name = "sentence-transformers", marker = "python_full_version < '3.14' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] tavily = [ { name = "tavily-python" }, @@ -6829,7 +6841,7 @@ vertexai = [ { name = "requests" }, ] voyageai = [ - { name = "voyageai", marker = "python_full_version < '3.14'" }, + { name = "voyageai", marker = "python_full_version < '3.14' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] web = [ { name = "httpx" }, @@ -6849,6 +6861,7 @@ requires-dist = [ { name = "cohere", marker = "sys_platform != 'emscripten' and extra == 'cohere'", specifier = ">=5.20.6" }, { name = "dbos", marker = "extra == 'dbos'", specifier = ">=2.10.0" }, { name = "ddgs", marker = "extra == 'duckduckgo'", specifier = ">=9.0.0" }, + { name = "docker", marker = "extra == 'docker-environment'", specifier = 
">=7.0" }, { name = "exa-py", marker = "extra == 'exa'", specifier = ">=2.0.0" }, { name = "exceptiongroup", marker = "python_full_version < '3.11'", specifier = ">=1.2.2" }, { name = "fasta2a", marker = "extra == 'a2a'", specifier = ">=0.4.1" }, @@ -6899,7 +6912,7 @@ requires-dist = [ { name = "voyageai", marker = "python_full_version < '3.14' and extra == 'voyageai'", specifier = ">=0.3.7" }, { name = "xai-sdk", marker = "extra == 'xai'", specifier = ">=1.5.0" }, ] -provides-extras = ["a2a", "ag-ui", "anthropic", "bedrock", "cli", "cohere", "dbos", "duckduckgo", "evals", "exa", "fastmcp", "google", "groq", "huggingface", "logfire", "mcp", "mistral", "openai", "openrouter", "outlines-llamacpp", "outlines-mlxlm", "outlines-sglang", "outlines-transformers", "outlines-vllm-offline", "prefect", "retries", "sentence-transformers", "tavily", "temporal", "ui", "vertexai", "voyageai", "web", "xai"] +provides-extras = ["a2a", "ag-ui", "anthropic", "bedrock", "cli", "cohere", "dbos", "docker-environment", "duckduckgo", "evals", "exa", "fastmcp", "google", "groq", "huggingface", "logfire", "mcp", "mistral", "openai", "openrouter", "outlines-llamacpp", "outlines-mlxlm", "outlines-sglang", "outlines-transformers", "outlines-vllm-offline", "prefect", "retries", "sentence-transformers", "tavily", "temporal", "ui", "vertexai", "voyageai", "web", "xai"] [[package]] name = "pydantic-core" @@ -7117,7 +7130,7 @@ version = "0.16.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cloudpickle" }, - { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "fakeredis", extra = ["lua"] }, { name = "opentelemetry-api" }, { name = "opentelemetry-exporter-prometheus" }, @@ -7207,13 +7220,13 @@ name = "pytest" version = "9.0.2" source = { registry = 
"https://pypi.org/simple" } dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "colorama", marker = "sys_platform == 'win32' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "iniconfig" }, { name = "packaging" }, { name = "pluggy" }, { name = "pygments" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "tomli", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } wheels = [ @@ -7585,7 +7598,7 @@ name = "redis" version = "7.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "async-timeout", marker = "python_full_version < '3.11.3'" }, + { name = "async-timeout", marker = "python_full_version < '3.11.3' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/43/c8/983d5c6579a411d8a99bc5823cc5712768859b5ce2c8afe1a65b37832c81/redis-7.1.0.tar.gz", hash = "sha256:b1cc3cfa5a2cb9c2ab3ba700864fb0ad75617b41f01352ce5779dabf6d5f9c3c", size = 4796669, upload-time = "2025-11-19T15:54:39.961Z" } wheels = [ @@ -7599,7 +7612,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, { name = "rpds-py" }, - { 
name = "typing-extensions", marker = "python_full_version < '3.13'" }, + { name = "typing-extensions", marker = "python_full_version < '3.13' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } wheels = [ @@ -8065,7 +8078,7 @@ name = "ruamel-yaml" version = "0.18.17" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "ruamel-yaml-clib", marker = "python_full_version < '3.15' and platform_python_implementation == 'CPython'" }, + { name = "ruamel-yaml-clib", marker = "(python_full_version < '3.15' and platform_python_implementation == 'CPython') or (python_full_version >= '3.15' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (platform_python_implementation != 'CPython' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/3a/2b/7a1f1ebcd6b3f14febdc003e658778d81e76b40df2267904ee6b13f0c5c6/ruamel_yaml-0.18.17.tar.gz", hash = "sha256:9091cd6e2d93a3a4b157ddb8fabf348c3de7f1fb1381346d985b6b247dcd8d3c", size = 149602, upload-time = "2025-12-17T20:02:55.757Z" } wheels = [ @@ -8214,10 +8227,10 @@ resolution-markers = [ "python_full_version < '3.11'", ] dependencies = [ - { name = "joblib", marker = "python_full_version < '3.11'" }, - { name = "numpy", marker = "python_full_version < '3.11'" }, - { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "threadpoolctl", marker = "python_full_version < '3.11'" }, + { 
name = "joblib", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "numpy", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "threadpoolctl", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/98/c2/a7855e41c9d285dfe86dc50b250978105dce513d6e459ea66a6aeb0e1e0c/scikit_learn-1.7.2.tar.gz", hash = "sha256:20e9e49ecd130598f1ca38a1d85090e1a600147b9c02fa6f15d69cb53d968fda", size = 7193136, upload-time = "2025-09-09T08:21:29.075Z" } wheels = [ @@ -8263,10 +8276,10 @@ resolution-markers = [ "python_full_version == '3.11.*'", ] dependencies = [ - { name = "joblib", marker = "python_full_version >= '3.11'" }, - { name = "numpy", marker = "python_full_version >= '3.11'" }, - { name = "scipy", version = "1.16.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "threadpoolctl", marker = "python_full_version >= '3.11'" }, + { name = "joblib", marker = "python_full_version >= '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "numpy", marker = "python_full_version >= '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "scipy", version = "1.16.3", source = { registry = "https://pypi.org/simple" }, marker 
= "python_full_version >= '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "threadpoolctl", marker = "python_full_version >= '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/0e/d4/40988bf3b8e34feec1d0e6a051446b1f66225f8529b9309becaeef62b6c4/scikit_learn-1.8.0.tar.gz", hash = "sha256:9bccbb3b40e3de10351f8f5068e105d0f4083b1a65fa07b6634fbc401a6287fd", size = 7335585, upload-time = "2025-12-10T07:08:53.618Z" } wheels = [ @@ -8316,7 +8329,7 @@ resolution-markers = [ "python_full_version < '3.11'", ] dependencies = [ - { name = "numpy", marker = "python_full_version < '3.11'" }, + { name = "numpy", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/0f/37/6964b830433e654ec7485e45a00fc9a27cf868d622838f6b6d9c5ec0d532/scipy-1.15.3.tar.gz", hash = "sha256:eae3cf522bc7df64b42cad3925c876e1b0b6c35c1337c93e12c0f366f55b0eaf", size = 59419214, upload-time = "2025-05-08T16:13:05.955Z" } wheels = [ @@ -8377,7 +8390,7 @@ resolution-markers = [ "python_full_version == '3.11.*'", ] dependencies = [ - { name = "numpy", marker = "python_full_version >= '3.11'" }, + { name = "numpy", marker = "python_full_version >= '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/0a/ca/d8ace4f98322d01abcd52d381134344bf7b431eba7ed8b42bdea5a3c2ac9/scipy-1.16.3.tar.gz", hash = "sha256:01e87659402762f43bd2fee13370553a17ada367d42e7487800bf2916535aecb", size = 30597883, upload-time = "2025-10-28T17:38:54.068Z" } wheels = [ @@ -8541,15 +8554,17 @@ name = "sentence-transformers" version = 
"5.2.2" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "huggingface-hub" }, + { name = "huggingface-hub", version = "0.36.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "huggingface-hub", version = "1.4.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, { name = "numpy" }, - { name = "scikit-learn", version = "1.7.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "scikit-learn", version = "1.8.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "scipy", version = "1.16.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "scikit-learn", version = "1.7.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "scikit-learn", version = "1.8.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 
'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "scipy", version = "1.16.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "torch" }, { name = "tqdm" }, - { name = "transformers" }, + { name = "transformers", version = "4.57.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "transformers", version = "5.2.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/a6/bc/0bc9c0ec1cf83ab2ec6e6f38667d167349b950fff6dd2086b79bd360eeca/sentence_transformers-5.2.2.tar.gz", hash = "sha256:7033ee0a24bc04c664fd490abf2ef194d387b3a58a97adcc528783ff505159fa", size = 381607, upload-time = "2026-01-27T11:11:02.658Z" } @@ -8798,7 +8813,7 @@ name = "sqlalchemy" version = "2.0.45" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" }, + { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 
'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/be/f9/5e4491e5ccf42f5d9cfc663741d261b3e6e1683ae7812114e7636409fcc6/sqlalchemy-2.0.45.tar.gz", hash = "sha256:1632a4bda8d2d25703fdad6363058d882541bdaaee0e5e3ddfa0cd3229efce88", size = 9869912, upload-time = "2025-12-09T21:05:16.737Z" } @@ -8866,7 +8881,7 @@ version = "0.50.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, + { name = "typing-extensions", marker = "python_full_version < '3.13' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/ba/b8/73a0e6a6e079a9d9cfa64113d771e421640b6f679a52eeb9b32f72d871a1/starlette-0.50.0.tar.gz", hash = "sha256:a2a17b22203254bcbc2e1f926d2d55f3f9497f769416b3190768befe598fa3ca", size = 2646985, upload-time = "2025-11-01T15:25:27.516Z" } wheels = [ @@ -8945,7 +8960,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "nexus-rpc" }, { name = "protobuf" }, - { name = "python-dateutil", marker = "python_full_version < '3.11'" }, + { name = "python-dateutil", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "types-protobuf" }, { name = "typing-extensions" }, ] @@ -9063,7 +9078,8 @@ name = "tokenizers" version = "0.22.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "huggingface-hub" }, + { name = "huggingface-hub", version = "0.36.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 
'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "huggingface-hub", version = "1.4.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/1c/46/fb6854cec3278fbfa4a75b50232c77622bc517ac886156e6afbfa4d8fc6e/tokenizers-0.22.1.tar.gz", hash = "sha256:61de6522785310a309b3407bac22d99c4db5dba349935e99e4d15ea2226af2d9", size = 363123, upload-time = "2025-09-19T09:49:23.424Z" } wheels = [ @@ -9158,26 +9174,26 @@ dependencies = [ { name = "filelock" }, { name = "fsspec" }, { name = "jinja2" }, - { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "networkx", version = "3.6.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "nvidia-cublas-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cuda-cupti-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cuda-nvrtc-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cuda-runtime-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cudnn-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cufft-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cufile-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-curand-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cusolver-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cusparse-cu12", marker = 
"platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cusparselt-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-nccl-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-nvjitlink-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-nvshmem-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-nvtx-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "setuptools", marker = "python_full_version >= '3.12'" }, + { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "networkx", version = "3.6.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "nvidia-cublas-cu12", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux') or (platform_machine != 'x86_64' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (sys_platform != 'linux' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "nvidia-cuda-cupti-cu12", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux') or (platform_machine != 'x86_64' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (sys_platform != 'linux' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = 
"nvidia-cuda-nvrtc-cu12", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux') or (platform_machine != 'x86_64' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (sys_platform != 'linux' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "nvidia-cuda-runtime-cu12", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux') or (platform_machine != 'x86_64' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (sys_platform != 'linux' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "nvidia-cudnn-cu12", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux') or (platform_machine != 'x86_64' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (sys_platform != 'linux' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "nvidia-cufft-cu12", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux') or (platform_machine != 'x86_64' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (sys_platform != 'linux' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "nvidia-cufile-cu12", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux') or (platform_machine != 'x86_64' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (sys_platform != 'linux' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" 
}, + { name = "nvidia-curand-cu12", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux') or (platform_machine != 'x86_64' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (sys_platform != 'linux' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "nvidia-cusolver-cu12", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux') or (platform_machine != 'x86_64' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (sys_platform != 'linux' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "nvidia-cusparse-cu12", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux') or (platform_machine != 'x86_64' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (sys_platform != 'linux' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "nvidia-cusparselt-cu12", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux') or (platform_machine != 'x86_64' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (sys_platform != 'linux' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "nvidia-nccl-cu12", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux') or (platform_machine != 'x86_64' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (sys_platform != 'linux' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 
'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "nvidia-nvjitlink-cu12", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux') or (platform_machine != 'x86_64' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (sys_platform != 'linux' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "nvidia-nvshmem-cu12", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux') or (platform_machine != 'x86_64' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (sys_platform != 'linux' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "nvidia-nvtx-cu12", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux') or (platform_machine != 'x86_64' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (sys_platform != 'linux' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "setuptools", marker = "python_full_version >= '3.12' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "sympy" }, - { name = "triton", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "triton", marker = "(platform_machine == 'x86_64' and sys_platform == 'linux') or (platform_machine != 'x86_64' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (sys_platform != 'linux' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "typing-extensions" }, ] wheels = [ @@ 
-9294,7 +9310,7 @@ name = "tqdm" version = "4.67.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "colorama", marker = "sys_platform == 'win32' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" } wheels = [ @@ -9303,23 +9319,53 @@ wheels = [ [[package]] name = "transformers" -version = "5.0.0" +version = "4.57.6" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.11.*'", + "python_full_version < '3.11'", +] +dependencies = [ + { name = "filelock", marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "huggingface-hub", version = "0.36.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "numpy", marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "packaging", marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, 
+ { name = "pyyaml", marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "regex", marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "requests", marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "safetensors", marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "tokenizers", marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, + { name = "tqdm", marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c4/35/67252acc1b929dc88b6602e8c4a982e64f31e733b804c14bc24b47da35e6/transformers-4.57.6.tar.gz", hash = "sha256:55e44126ece9dc0a291521b7e5492b572e6ef2766338a610b9ab5afbb70689d3", size = 10134912, upload-time = "2026-01-16T10:38:39.284Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/b8/e484ef633af3887baeeb4b6ad12743363af7cce68ae51e938e00aaa0529d/transformers-4.57.6-py3-none-any.whl", hash = 
"sha256:4c9e9de11333ddfe5114bc872c9f370509198acf0b87a832a0ab9458e2bd0550", size = 11993498, upload-time = "2026-01-16T10:38:31.289Z" }, +] + +[[package]] +name = "transformers" +version = "5.2.0" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.13'", + "python_full_version == '3.12.*'", + "python_full_version == '3.11.*'", + "python_full_version < '3.11'", +] dependencies = [ - { name = "filelock" }, - { name = "huggingface-hub" }, - { name = "numpy" }, - { name = "packaging" }, - { name = "pyyaml" }, - { name = "regex" }, - { name = "safetensors" }, - { name = "tokenizers" }, - { name = "tqdm" }, - { name = "typer-slim" }, + { name = "huggingface-hub", version = "1.4.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, + { name = "numpy", marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, + { name = "packaging", marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, + { name = "pyyaml", marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, + { name = "regex", marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, + { name = "safetensors", marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, + { name = "tokenizers", marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 
'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, + { name = "tqdm", marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, + { name = "typer-slim", marker = "python_full_version >= '3.12' or extra == 'extra-16-pydantic-ai-slim-huggingface' or extra != 'extra-16-pydantic-ai-slim-outlines-vllm-offline'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bc/79/845941711811789c85fb7e2599cea425a14a07eda40f50896b9d3fda7492/transformers-5.0.0.tar.gz", hash = "sha256:5f5634efed6cf76ad068cc5834c7adbc32db78bbd6211fb70df2325a9c37dec8", size = 8424830, upload-time = "2026-01-26T10:46:46.813Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bd/7e/8a0c57d562015e5b16c97c1f0b8e0e92ead2c7c20513225dc12c2043ba9f/transformers-5.2.0.tar.gz", hash = "sha256:0088b8b46ccc9eff1a1dca72b5d618a5ee3b1befc3e418c9512b35dea9f9a650", size = 8618176, upload-time = "2026-02-16T18:54:02.867Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/52/f3/ac976fa8e305c9e49772527e09fbdc27cc6831b8a2f6b6063406626be5dd/transformers-5.0.0-py3-none-any.whl", hash = "sha256:587086f249ce64c817213cf36afdb318d087f790723e9b3d4500b97832afd52d", size = 10142091, upload-time = "2026-01-26T10:46:43.88Z" }, + { url = "https://files.pythonhosted.org/packages/4e/93/79754b0ca486e556c2b95d4f5afc66aaf4b260694f3d6e1b51da2d036691/transformers-5.2.0-py3-none-any.whl", hash = "sha256:9ecaf243dc45bee11a7d93f8caf03746accc0cb069181bbf4ad8566c53e854b4", size = 10403304, upload-time = "2026-02-16T18:53:59.699Z" }, ] [[package]] @@ -9327,12 +9373,19 @@ name = "triton" version = "3.5.1" source = { registry = "https://pypi.org/simple" } wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/2e/f95e673222afa2c7f0c687d8913e98fcf2589ef0b1405de76894e37fe18f/triton-3.5.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:f63e34dcb32d7bd3a1d0195f60f30d2aee8b08a69a0424189b71017e23dfc3d2", size = 159821655, upload-time = "2025-11-11T17:51:44.09Z" }, { url = "https://files.pythonhosted.org/packages/fd/6e/676ab5019b4dde8b9b7bab71245102fc02778ef3df48218b298686b9ffd6/triton-3.5.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5fc53d849f879911ea13f4a877243afc513187bc7ee92d1f2c0f1ba3169e3c94", size = 170320692, upload-time = "2025-11-11T17:40:46.074Z" }, + { url = "https://files.pythonhosted.org/packages/dc/dc/6ce44d055f2fc2403c4ec6b3cfd3a9b25f57b7d95efadccdea91497f8e81/triton-3.5.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:da47169e30a779bade679ce78df4810fca6d78a955843d2ddb11f226adc517dc", size = 159928005, upload-time = "2025-11-11T17:51:50.008Z" }, { url = "https://files.pythonhosted.org/packages/b0/72/ec90c3519eaf168f22cb1757ad412f3a2add4782ad3a92861c9ad135d886/triton-3.5.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:61413522a48add32302353fdbaaf92daaaab06f6b5e3229940d21b5207f47579", size = 170425802, upload-time = "2025-11-11T17:40:53.209Z" }, + { url = "https://files.pythonhosted.org/packages/db/53/2bcc46879910991f09c063eea07627baef2bc62fe725302ba8f46a2c1ae5/triton-3.5.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:275a045b6ed670dd1bd005c3e6c2d61846c74c66f4512d6f33cc027b11de8fd4", size = 159940689, upload-time = "2025-11-11T17:51:55.938Z" }, { url = "https://files.pythonhosted.org/packages/f2/50/9a8358d3ef58162c0a415d173cfb45b67de60176e1024f71fbc4d24c0b6d/triton-3.5.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d2c6b915a03888ab931a9fd3e55ba36785e1fe70cbea0b40c6ef93b20fc85232", size = 170470207, upload-time = "2025-11-11T17:41:00.253Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/ba/805684a992ee32d486b7948d36aed2f5e3c643fc63883bf8bdca1c3f3980/triton-3.5.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56765ffe12c554cd560698398b8a268db1f616c120007bfd8829d27139abd24a", size = 159955460, upload-time = "2025-11-11T17:52:01.861Z" }, { url = "https://files.pythonhosted.org/packages/27/46/8c3bbb5b0a19313f50edcaa363b599e5a1a5ac9683ead82b9b80fe497c8d/triton-3.5.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f3f4346b6ebbd4fad18773f5ba839114f4826037c9f2f34e0148894cd5dd3dba", size = 170470410, upload-time = "2025-11-11T17:41:06.319Z" }, + { url = "https://files.pythonhosted.org/packages/84/1e/7df59baef41931e21159371c481c31a517ff4c2517343b62503d0cd2be99/triton-3.5.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:02c770856f5e407d24d28ddc66e33cf026e6f4d360dcb8b2fabe6ea1fc758621", size = 160072799, upload-time = "2025-11-11T17:52:07.293Z" }, { url = "https://files.pythonhosted.org/packages/37/92/e97fcc6b2c27cdb87ce5ee063d77f8f26f19f06916aa680464c8104ef0f6/triton-3.5.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0b4d2c70127fca6a23e247f9348b8adde979d2e7a20391bfbabaac6aebc7e6a8", size = 170579924, upload-time = "2025-11-11T17:41:12.455Z" }, + { url = "https://files.pythonhosted.org/packages/14/f9/0430e879c1e63a1016cb843261528fd3187c872c3a9539132efc39514753/triton-3.5.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f617aa7925f9ea9968ec2e1adaf93e87864ff51549c8f04ce658f29bbdb71e2d", size = 159956163, upload-time = "2025-11-11T17:52:12.999Z" }, { url = "https://files.pythonhosted.org/packages/a4/e6/c595c35e5c50c4bc56a7bac96493dad321e9e29b953b526bbbe20f9911d0/triton-3.5.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0637b1efb1db599a8e9dc960d53ab6e4637db7d4ab6630a0974705d77b14b60", size = 170480488, upload-time = 
"2025-11-11T17:41:18.222Z" }, + { url = "https://files.pythonhosted.org/packages/41/1e/63d367c576c75919e268e4fbc33c1cb33b6dc12bb85e8bfe531c2a8bd5d3/triton-3.5.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8932391d7f93698dfe5bc9bead77c47a24f97329e9f20c10786bb230a9083f56", size = 160073620, upload-time = "2025-11-11T17:52:18.403Z" }, { url = "https://files.pythonhosted.org/packages/16/b5/b0d3d8b901b6a04ca38df5e24c27e53afb15b93624d7fd7d658c7cd9352a/triton-3.5.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bac7f7d959ad0f48c0e97d6643a1cc0fd5786fe61cb1f83b537c6b2d54776478", size = 170582192, upload-time = "2025-11-11T17:41:23.963Z" }, ] @@ -9456,7 +9509,7 @@ name = "tzlocal" version = "5.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "tzdata", marker = "sys_platform == 'win32'" }, + { name = "tzdata", marker = "sys_platform == 'win32' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/8b/2e/c14812d3d4d9cd1773c6be938f89e5735a1f11a9f184ac3639b93cef35d5/tzlocal-5.3.1.tar.gz", hash = "sha256:cceffc7edecefea1f595541dbd6e990cb1ea3d19bf01b2809f362a03dd7921fd", size = 30761, upload-time = "2025-03-05T21:17:41.549Z" } wheels = [ @@ -9508,7 +9561,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "h11" }, - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/c3/d1/8f3c683c9561a4e6689dd3b1d345c815f10f86acd044ee1fb9a4dcd0b8c5/uvicorn-0.40.0.tar.gz", hash = "sha256:839676675e87e73694518b5574fd0f24c9d97b46bea16df7b8c05ea1a51071ea", size = 
81761, upload-time = "2025-12-21T14:16:22.45Z" } wheels = [ @@ -9598,7 +9651,7 @@ dependencies = [ { name = "depyf", marker = "python_full_version < '3.12'" }, { name = "diskcache", marker = "python_full_version < '3.12'" }, { name = "einops", marker = "python_full_version < '3.12'" }, - { name = "fastapi", extra = ["standard"], marker = "python_full_version < '3.12'" }, + { name = "fastapi", extra = ["standard"], marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "filelock", marker = "python_full_version < '3.12'" }, { name = "flashinfer-python", marker = "python_full_version < '3.12'" }, { name = "gguf", marker = "python_full_version < '3.12'" }, @@ -9609,11 +9662,11 @@ dependencies = [ { name = "llguidance", marker = "(python_full_version < '3.12' and platform_machine == 'aarch64') or (python_full_version < '3.12' and platform_machine == 'arm64') or (python_full_version < '3.12' and platform_machine == 'ppc64le') or (python_full_version < '3.12' and platform_machine == 's390x') or (python_full_version < '3.12' and platform_machine == 'x86_64')" }, { name = "lm-format-enforcer", marker = "python_full_version < '3.12'" }, { name = "mcp", marker = "python_full_version < '3.12'" }, - { name = "mistral-common", extra = ["image"], marker = "python_full_version < '3.12'" }, + { name = "mistral-common", extra = ["image"], marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "model-hosting-container-standards", marker = "python_full_version < '3.12'" }, { name = "msgspec", marker = "python_full_version < '3.12'" }, { name = "ninja", marker = "python_full_version < '3.12'" }, - { name = "numba", version = "0.61.2", 
source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" }, + { name = "numba", marker = "python_full_version < '3.12'" }, { name = "numpy", marker = "python_full_version < '3.12'" }, { name = "openai", marker = "python_full_version < '3.12'" }, { name = "openai-harmony", marker = "python_full_version < '3.12'" }, @@ -9631,7 +9684,7 @@ dependencies = [ { name = "python-json-logger", marker = "python_full_version < '3.12'" }, { name = "pyyaml", marker = "python_full_version < '3.12'" }, { name = "pyzmq", marker = "python_full_version < '3.12'" }, - { name = "ray", extra = ["cgraph"], marker = "python_full_version < '3.12'" }, + { name = "ray", extra = ["cgraph"], marker = "(python_full_version < '3.12' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "regex", marker = "python_full_version < '3.12'" }, { name = "requests", marker = "python_full_version < '3.12'" }, { name = "sentencepiece", marker = "python_full_version < '3.12'" }, @@ -9642,7 +9695,7 @@ dependencies = [ { name = "torchaudio", marker = "python_full_version < '3.12'" }, { name = "torchvision", marker = "python_full_version < '3.12'" }, { name = "tqdm", marker = "python_full_version < '3.12'" }, - { name = "transformers", marker = "python_full_version < '3.12'" }, + { name = "transformers", version = "4.57.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" }, { name = "typing-extensions", marker = "python_full_version < '3.12'" }, { name = "watchfiles", marker = "python_full_version < '3.12'" }, { name = "xgrammar", marker = "(python_full_version < '3.12' and platform_machine == 'aarch64') or (python_full_version < '3.12' and platform_machine == 'arm64') or (python_full_version < '3.12' and platform_machine == 'ppc64le') or (python_full_version < '3.12' and platform_machine == 
's390x') or (python_full_version < '3.12' and platform_machine == 'x86_64')" }, @@ -9661,7 +9714,7 @@ dependencies = [ { name = "aiolimiter" }, { name = "ffmpeg-python" }, { name = "langchain-text-splitters" }, - { name = "numpy", marker = "python_full_version < '3.14'" }, + { name = "numpy", marker = "python_full_version < '3.14' or (extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, { name = "pillow" }, { name = "pydantic" }, { name = "requests" }, @@ -9890,7 +9943,7 @@ name = "whenever" version = "0.8.10" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "tzdata", marker = "python_full_version >= '3.13' and sys_platform == 'win32'" }, + { name = "tzdata", marker = "(python_full_version >= '3.13' and sys_platform == 'win32') or (python_full_version < '3.13' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline') or (sys_platform != 'win32' and extra == 'extra-16-pydantic-ai-slim-huggingface' and extra == 'extra-16-pydantic-ai-slim-outlines-vllm-offline')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/4d/67/cfc23dfe54ced1e4388826b29db9b9ab2c70a342b33b7e92cf15866f35a6/whenever-0.8.10.tar.gz", hash = "sha256:5e2a3da71527e299f98eec5bb38c4e79d9527a127107387456125005884fb235", size = 240223, upload-time = "2025-10-16T20:31:23.538Z" } wheels = [ @@ -10072,20 +10125,23 @@ dependencies = [ { name = "numpy", marker = "python_full_version < '3.12'" }, { name = "pydantic", marker = "python_full_version < '3.12'" }, { name = "torch", marker = "python_full_version < '3.12'" }, - { name = "transformers", marker = "python_full_version < '3.12'" }, + { name = "transformers", version = "4.57.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" }, { name = "triton", marker = "python_full_version < '3.12' and platform_machine == 'x86_64' and sys_platform == 'linux'" }, { 
name = "typing-extensions", marker = "python_full_version < '3.12'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/02/a3/70dbe3ffd331a1e7e1ad5a95690a4086e6c7cdb8089f5c7eda712219ccec/xgrammar-0.1.29.tar.gz", hash = "sha256:cf195afa81b489eebf35d4c6f37f27136d05420739ab4a6f7f065c938d7e4baa", size = 2321317, upload-time = "2025-12-19T08:23:54.53Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/6d/6384619408da47411c71b2baed3d4bc509a4a9aa0a63d738709b516869b5/xgrammar-0.1.29-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:fdc66e834b915cf956168ac086bd577f138261644b944e73d73f07085682a4d8", size = 16008147, upload-time = "2025-12-19T08:22:59.54Z" }, { url = "https://files.pythonhosted.org/packages/a8/2d/6ead6206bda4582620b176f02840254183c61682e20041a2d950d6f1ee7a/xgrammar-0.1.29-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:48c5a5c60c5ca5ab09ff5ef9f6b382384a04b153bae5908006cd4f7d80d71e07", size = 17914539, upload-time = "2025-12-19T08:23:02.011Z" }, { url = "https://files.pythonhosted.org/packages/04/75/5305fe75823489c160dec8ee2a95a631e44a690eacec765469e513aca738/xgrammar-0.1.29-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cea3e65d60f8e55568dbb1457e6c4da6d381262a9b1211fe023630630b733d8", size = 34702454, upload-time = "2025-12-19T08:23:05.143Z" }, { url = "https://files.pythonhosted.org/packages/af/3c/7426aadf64a4ecfc1a1966babc57e4694235bf50392e96c506f930a4cdbe/xgrammar-0.1.29-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:866882b58ac654a1d1cd5e0c1ac67824b730aff8a40f9f19f0e8938a107dcd8a", size = 34903300, upload-time = "2025-12-19T08:23:08.098Z" }, { url = "https://files.pythonhosted.org/packages/05/f5/17ebcb575bd105cbcb5fee3c69906cee2423dbfdd73a18a60e205a619244/xgrammar-0.1.29-cp310-cp310-win_amd64.whl", hash = "sha256:8551dae4d38bd20c36a12c90a2954c3832bb6397211fc3aeba0b0d7920a1ea4b", size = 5928622, upload-time = "2025-12-19T08:23:10.485Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/de/88832fac40962fd0d4703bd4ba84598b06b8408bdc4a6722744f363f68a6/xgrammar-0.1.29-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:d2a7eef1b75b8d31b868d5c79855622aad203275ff267fc0e0ef77dd91906cfe", size = 16008004, upload-time = "2025-12-19T08:23:11.998Z" }, { url = "https://files.pythonhosted.org/packages/76/f6/4d22eec5305657430955442077306bc6ed85becc564116165d4b3a7049ad/xgrammar-0.1.29-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4af7f6ce2b2c6295b936b7cbda09f78e33f2c492a139cd64560f5d8d0fe967ed", size = 17914326, upload-time = "2025-12-19T08:23:14.43Z" }, { url = "https://files.pythonhosted.org/packages/87/0b/b5e5c99ce13a9d378a940cda07c5a08b50cc7efb66936c6ac8fa8232a0d5/xgrammar-0.1.29-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51bcfd63bd48a0b26209ffd2143a42067518559355ec9e4e574cef2ae74fac7c", size = 34699408, upload-time = "2025-12-19T08:23:16.906Z" }, { url = "https://files.pythonhosted.org/packages/a3/a0/4ebc1b3f5af79a3f73d0566034758f3fbcd9c64174646314a9a6f7cc1d27/xgrammar-0.1.29-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e27b50cf8c565845295a8263a4a0790c00a7c1fd783e76222fc0f575654d6f56", size = 34903461, upload-time = "2025-12-19T08:23:19.556Z" }, { url = "https://files.pythonhosted.org/packages/77/21/f6b3978dc9761bbfbbb153d33441206ce2253efa271d8e2d8b6b210d2bd7/xgrammar-0.1.29-cp311-cp311-win_amd64.whl", hash = "sha256:c9f8ea76bcf41b48168974b509b1546d2bee289ff1b20c68bc97434c1ea6e49a", size = 5928633, upload-time = "2025-12-19T08:23:21.67Z" }, + { url = "https://files.pythonhosted.org/packages/c1/d8/fb282fc78be6e9bbefb5cb389f66b22e4efd6ae14f06234f599651620da5/xgrammar-0.1.29-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:d992a3cee7594bbdaa64ae59f90da5ce21c5fe654719df3816014289ada6f04d", size = 16007376, upload-time = "2025-12-19T08:23:23.634Z" }, { url = 
"https://files.pythonhosted.org/packages/82/a7/2c9767620ee50f2f40f1eb95e55a3a29e1a0670f087ee6dc1bc1c887b906/xgrammar-0.1.29-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1bbdf02e45cfa8614218ba01ca7952d375f8bc1c13884e3d04daa4b54180cbc2", size = 17913535, upload-time = "2025-12-19T08:23:26.02Z" }, { url = "https://files.pythonhosted.org/packages/57/94/18793c64bf0368075a34c06e196bf002f1e6ab0aee332268f44e8d356d5a/xgrammar-0.1.29-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6eb370a16b27a683e5f2b9e429ab41440c69977d4a504849ed61831b94cc704c", size = 34705239, upload-time = "2025-12-19T08:23:28.369Z" }, { url = "https://files.pythonhosted.org/packages/3e/da/4c14e3e00be698009b52700f15326a23272b4b00475939b6acc86b151188/xgrammar-0.1.29-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79e6e4f5cd33be77418cf91efc482f2b3d773d309891224383bc8a4948ad7b07", size = 34906135, upload-time = "2025-12-19T08:23:30.838Z" },