Skip to content
Open
Show file tree
Hide file tree
Changes from 17 commits
Commits
Show all changes
22 commits
Select commit Hold shift + click to select a range
f667f4b
feat(integrations): added initial Pydantic AI integration implementation
constantinius Oct 9, 2025
8c26119
fix: model name lookup and message history preservation
constantinius Oct 9, 2025
742d77f
feat: add support for run_stream
constantinius Oct 9, 2025
76a7a67
fix: deduping code
constantinius Oct 9, 2025
9d166ba
fix(integrations): add pydantic-ai as an optional dependency
constantinius Oct 10, 2025
3337c19
fix(integrations): fixing span description -> name
constantinius Oct 10, 2025
bf3ce00
feat: add include_prompts for Pydantic AI integration
constantinius Oct 10, 2025
2290901
Merge branch 'master' into constantinius/feat/integration/pydantic-ai
sentrivana Oct 10, 2025
c7e7ec2
Add pydantic ai to ci
sentrivana Oct 10, 2025
1a2cb97
add pytest-asyncio
sentrivana Oct 10, 2025
b9f3357
Merge branch 'master' into constantinius/feat/integration/pydantic-ai
sentrivana Oct 10, 2025
48af290
fix: mypy issues
constantinius Oct 10, 2025
7263a77
fix: working in feedback
constantinius Oct 13, 2025
a94870e
Update sentry_sdk/integrations/pydantic_ai/spans/invoke_agent.py
constantinius Oct 15, 2025
7f335f9
fix(integrations): cleanups and working in feedback
constantinius Oct 15, 2025
0929e58
fix(integrations): type checking import missing
constantinius Oct 15, 2025
4b40d2d
fix: significantly simplifying instrumentation
constantinius Oct 22, 2025
87ebafc
feat: add support for MCP Tool calls as well
constantinius Oct 22, 2025
9e20ebb
test: update tests to reflect recent changes
constantinius Oct 23, 2025
b25c434
fix: working in feedback, making object typechecks less brittle
constantinius Oct 23, 2025
97d6b52
fix: shuffling imports so that the DidNotEnable exception is actually…
constantinius Oct 23, 2025
0a32559
ci: -m revert: wrong integration sentry_sdk/integrations/openai_agent…
constantinius Oct 23, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions .github/workflows/test-integrations-ai.yml
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,10 @@ jobs:
run: |
set -x # print commands that are executed
./scripts/runtox.sh "py${{ matrix.python-version }}-openai_agents"
- name: Test pydantic_ai
run: |
set -x # print commands that are executed
./scripts/runtox.sh "py${{ matrix.python-version }}-pydantic_ai"
- name: Generate coverage XML (Python 3.6)
if: ${{ !cancelled() && matrix.python-version == '3.6' }}
run: |
Expand Down
6 changes: 6 additions & 0 deletions scripts/populate_tox/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -272,6 +272,12 @@
"package": "pure_eval",
"num_versions": 2,
},
"pydantic_ai": {
"package": "pydantic-ai",
"deps": {
"*": ["pytest-asyncio"],
},
},
"pymongo": {
"package": "pymongo",
"deps": {
Expand Down
1 change: 1 addition & 0 deletions scripts/populate_tox/releases.jsonl
Original file line number Diff line number Diff line change
Expand Up @@ -129,6 +129,7 @@
{"info": {"classifiers": ["License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 3"], "name": "openfeature-sdk", "requires_python": ">=3.9", "version": "0.8.3", "yanked": false}}
{"info": {"classifiers": ["Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8"], "name": "pure-eval", "requires_python": "", "version": "0.0.3", "yanked": false}}
{"info": {"classifiers": ["Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9"], "name": "pure-eval", "requires_python": null, "version": "0.2.3", "yanked": false}}
{"info": {"classifiers": ["Development Status :: 5 - Production/Stable", "Framework :: Pydantic", "Framework :: Pydantic :: 2", "Intended Audience :: Developers", "Intended Audience :: Information Technology", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Topic :: Internet", "Topic :: Scientific/Engineering :: Artificial Intelligence", "Topic :: Software Development :: Libraries :: Python Modules"], "name": "pydantic-ai", "requires_python": ">=3.10", "version": "1.0.17", "yanked": false}}
{"info": {"classifiers": ["Development Status :: 3 - Alpha", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "Operating System :: POSIX", "Programming Language :: Python", "Topic :: Database"], "name": "pymongo", "requires_python": null, "version": "0.6", "yanked": false}}
{"info": {"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "Operating System :: POSIX", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.4", "Programming Language :: Python :: 2.5", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.1", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: Jython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Database"], "name": "pymongo", "requires_python": null, "version": "2.8.1", "yanked": false}}
{"info": {"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "Operating System :: POSIX", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Database"], "name": "pymongo", "requires_python": "", "version": "3.13.0", "yanked": false}}
Expand Down
1 change: 1 addition & 0 deletions scripts/split_tox_gh_actions/split_tox_gh_actions.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,7 @@
"openai-base",
"openai-notiktoken",
"openai_agents",
"pydantic_ai",
],
"Cloud": [
"aws_lambda",
Expand Down
1 change: 1 addition & 0 deletions sentry_sdk/integrations/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -152,6 +152,7 @@ def iter_default_integrations(with_auto_enabling_integrations):
"openai": (1, 0, 0),
"openai_agents": (0, 0, 19),
"openfeature": (0, 7, 1),
"pydantic_ai": (1, 0, 0),
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Bug: AI Integration Auto-Enablement Issue

The PydanticAIIntegration was added to _MIN_VERSIONS but is missing from _AUTO_ENABLING_INTEGRATIONS. This prevents it from being automatically enabled when pydantic_ai is present, which is inconsistent with other AI integrations and its auto.ai.pydantic_ai origin.

Fix in Cursor Fix in Web

"quart": (0, 16, 0),
"ray": (2, 7, 0),
"requests": (2, 0, 0),
Expand Down
45 changes: 45 additions & 0 deletions sentry_sdk/integrations/pydantic_ai/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
from sentry_sdk.integrations import DidNotEnable, Integration

# Check for the presence of pydantic-ai *before* importing the patch
# modules: they import pydantic_ai at module level themselves, so with
# the patches imported first a missing package would escape as a plain
# ImportError instead of the DidNotEnable signal the integrations
# machinery expects.
try:
    import pydantic_ai  # noqa: F401
except ImportError:
    raise DidNotEnable("pydantic-ai not installed")

from .patches import (
    _patch_agent_run,
    _patch_graph_nodes,
    _patch_model_request,
    _patch_tool_execution,
)


class PydanticAIIntegration(Integration):
    identifier = "pydantic_ai"
    origin = f"auto.ai.{identifier}"

    def __init__(self, include_prompts=True):
        # type: (bool) -> None
        """
        Configure the Pydantic AI integration.

        Args:
            include_prompts: When True (the default), prompts and messages
                are attached to span data. Only effective together with
                send_default_pii=True.
        """
        self.include_prompts = include_prompts

    @staticmethod
    def setup_once():
        # type: () -> None
        """
        Install the pydantic-ai instrumentation.

        Monkeypatches the relevant pydantic-ai entry points so that Sentry
        spans are emitted for:
        - agent invocations (the Agent.run family of methods)
        - model requests (calls to the underlying AI client)
        - tool executions
        """
        for patch in (
            _patch_agent_run,
            _patch_graph_nodes,
            _patch_model_request,
            _patch_tool_execution,
        ):
            patch()
1 change: 1 addition & 0 deletions sentry_sdk/integrations/pydantic_ai/consts.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
# Origin attached to every span created by this integration; matches
# PydanticAIIntegration.origin ("auto.ai." + the integration identifier).
SPAN_ORIGIN = "auto.ai.pydantic_ai"
4 changes: 4 additions & 0 deletions sentry_sdk/integrations/pydantic_ai/patches/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
from .agent_run import _patch_agent_run # noqa: F401
from .graph_nodes import _patch_graph_nodes # noqa: F401
from .model_request import _patch_model_request # noqa: F401
from .tools import _patch_tool_execution # noqa: F401
238 changes: 238 additions & 0 deletions sentry_sdk/integrations/pydantic_ai/patches/agent_run.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,238 @@
from functools import wraps

import sentry_sdk
from sentry_sdk.tracing_utils import set_span_errored
from sentry_sdk.utils import event_from_exception

from ..spans import invoke_agent_span, update_invoke_agent_span

from typing import TYPE_CHECKING
from pydantic_ai.agent import Agent

if TYPE_CHECKING:
from typing import Any, Callable, Optional


def _capture_exception(exc):
    # type: (Any) -> None
    """Mark the active span as errored and report *exc* to Sentry."""
    set_span_errored()

    client = sentry_sdk.get_client()
    event, hint = event_from_exception(
        exc,
        client_options=client.options,
        mechanism={"type": "pydantic_ai", "handled": False},
    )
    sentry_sdk.capture_event(event, hint=hint)


class _StreamingContextManagerWrapper:
    """Wrap the async context manager returned by streaming run methods
    (e.g. ``Agent.run_stream``) so that a Sentry isolation scope and an
    ``invoke_agent`` span cover the lifetime of the stream.
    """

    def __init__(
        self,
        agent,
        original_ctx_manager,
        user_prompt,
        model,
        model_settings,
        is_streaming=True,
    ):
        # type: (Any, Any, Any, Any, Any, bool) -> None
        self.agent = agent
        self.original_ctx_manager = original_ctx_manager
        self.user_prompt = user_prompt
        self.model = model
        self.model_settings = model_settings
        self.is_streaming = is_streaming
        self._isolation_scope = None  # type: Any
        self._span = None  # type: Optional[sentry_sdk.tracing.Span]
        self._result = None  # type: Any

    async def __aenter__(self):
        # type: () -> Any
        # Isolate this run so concurrently running agents don't touch
        # each other's scopes.
        self._isolation_scope = sentry_sdk.isolation_scope()
        self._isolation_scope.__enter__()

        # Store the agent and the streaming flag for access in nested spans.
        sentry_sdk.get_current_scope().set_context(
            "pydantic_ai_agent", {"_agent": self.agent, "_streaming": self.is_streaming}
        )

        # Create the invoke_agent span; it is closed in __aexit__.
        self._span = invoke_agent_span(
            self.user_prompt, self.agent, self.model, self.model_settings
        )
        self._span.__enter__()

        # Enter the original context manager.
        self._result = await self.original_ctx_manager.__aenter__()
        return self._result

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        # type: (Any, Any, Any) -> Any
        suppressed = False
        try:
            # Exit the original context manager first, and keep its return
            # value: a truthy result means it suppressed the exception, and
            # discarding it (as the previous implementation did) would
            # re-raise an exception the inner context manager had swallowed.
            suppressed = bool(
                await self.original_ctx_manager.__aexit__(exc_type, exc_val, exc_tb)
            )

            if exc_type is None:
                # Record the final output on the span on success.
                if self._result is not None and hasattr(self._result, "output"):
                    update_invoke_agent_span(self._span, self._result.output)
            elif not suppressed:
                # Mirror the non-streaming wrappers: report errors that will
                # propagate out of the `async with` block.
                _capture_exception(exc_val)
        finally:
            # Clean up invoke span
            if self._span:
                self._span.__exit__(exc_type, exc_val, exc_tb)

            # Clean up isolation scope
            if self._isolation_scope:
                self._isolation_scope.__exit__(exc_type, exc_val, exc_tb)
        return suppressed


def _create_run_wrapper(original_func, is_streaming=False):
    # type: (Callable[..., Any], bool) -> Callable[..., Any]
    """
    Wrap ``Agent.run`` so that every call is traced by an invoke_agent span.

    Args:
        original_func: The unpatched run method.
        is_streaming: Whether this is a streaming method (for future use).
    """

    @wraps(original_func)
    async def wrapper(self, *args, **kwargs):
        # type: (Any, *Any, **Any) -> Any
        # Give each workflow its own isolation scope so agents running in
        # separate asyncio tasks don't clobber each other's scope state.
        with sentry_sdk.isolation_scope():
            # Expose the full agent (for tools and system prompts) plus the
            # streaming flag to nested spans via the scope context.
            scope = sentry_sdk.get_current_scope()
            scope.set_context(
                "pydantic_ai_agent",
                {"_agent": self, "_streaming": is_streaming},
            )

            # Pull out the parameters the span reports on.
            user_prompt = kwargs.get("user_prompt") or (args[0] if args else None)

            with invoke_agent_span(
                user_prompt, self, kwargs.get("model"), kwargs.get("model_settings")
            ) as span:
                try:
                    result = await original_func(self, *args, **kwargs)
                    # Attach the agent output, if any, to the span.
                    update_invoke_agent_span(span, getattr(result, "output", None))
                    return result
                except Exception as exc:
                    _capture_exception(exc)
                    raise exc from None

    return wrapper


def _create_run_sync_wrapper(original_func):
# type: (Callable[..., Any]) -> Callable[..., Any]
"""
Wraps the Agent.run_sync method - no span needed as it delegates to run().
Note: run_sync just calls self.run() via run_until_complete, so the
invoke_agent span will be created by the run() wrapper.
"""

@wraps(original_func)
def wrapper(self, *args, **kwargs):
# type: (Any, *Any, **Any) -> Any
# Just call the original function - it will call run() which has the instrumentation
try:
result = original_func(self, *args, **kwargs)
return result
except Exception as exc:
_capture_exception(exc)
raise exc from None

return wrapper


def _create_streaming_wrapper(original_func):
    # type: (Callable[..., Any]) -> Callable[..., Any]
    """
    Wrap ``run_stream``, which returns an async context manager, by
    handing the original context manager to our instrumenting wrapper.
    """

    @wraps(original_func)
    def wrapper(self, *args, **kwargs):
        # type: (Any, *Any, **Any) -> Any
        prompt = kwargs.get("user_prompt") or (args[0] if args else None)

        # The original call only constructs the context manager; nothing
        # runs until it is entered.
        inner_cm = original_func(self, *args, **kwargs)

        return _StreamingContextManagerWrapper(
            agent=self,
            original_ctx_manager=inner_cm,
            user_prompt=prompt,
            model=kwargs.get("model"),
            model_settings=kwargs.get("model_settings"),
            is_streaming=True,
        )

    return wrapper


def _create_streaming_events_wrapper(original_func):
# type: (Callable[..., Any]) -> Callable[..., Any]
"""
Wraps run_stream_events method - no span needed as it delegates to run().
Note: run_stream_events internally calls self.run() with an event_stream_handler,
so the invoke_agent span will be created by the run() wrapper.
"""

@wraps(original_func)
async def wrapper(self, *args, **kwargs):
# type: (Any, *Any, **Any) -> Any
# Just call the original generator - it will call run() which has the instrumentation
try:
async for event in original_func(self, *args, **kwargs):
yield event
except Exception as exc:
_capture_exception(exc)
raise exc from None

return wrapper


def _patch_agent_run():
    # type: () -> None
    """
    Monkeypatch the ``Agent`` run entry points with instrumented versions.

    Covers the non-streaming methods (``run``, ``run_sync``) as well as the
    streaming ones (``run_stream``, ``run_stream_events``).
    """
    Agent.run = _create_run_wrapper(Agent.run, is_streaming=False)  # type: ignore
    Agent.run_sync = _create_run_sync_wrapper(Agent.run_sync)  # type: ignore
    Agent.run_stream = _create_streaming_wrapper(Agent.run_stream)  # type: ignore
    Agent.run_stream_events = _create_streaming_events_wrapper(  # type: ignore[method-assign]
        Agent.run_stream_events
    )
Loading
Loading