4 changes: 4 additions & 0 deletions .github/workflows/test-integrations-ai.yml
@@ -94,6 +94,10 @@ jobs:
run: |
set -x # print commands that are executed
./scripts/runtox.sh "py${{ matrix.python-version }}-openai_agents"
- name: Test pydantic_ai
run: |
set -x # print commands that are executed
./scripts/runtox.sh "py${{ matrix.python-version }}-pydantic_ai"
- name: Generate coverage XML (Python 3.6)
if: ${{ !cancelled() && matrix.python-version == '3.6' }}
run: |
6 changes: 6 additions & 0 deletions scripts/populate_tox/config.py
@@ -272,6 +272,12 @@
"package": "pure_eval",
"num_versions": 2,
},
"pydantic_ai": {
"package": "pydantic-ai",
"deps": {
"*": ["pytest-asyncio"],
},
},
"pymongo": {
"package": "pymongo",
"deps": {
1 change: 1 addition & 0 deletions scripts/populate_tox/releases.jsonl
@@ -129,6 +129,7 @@
{"info": {"classifiers": ["License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 3"], "name": "openfeature-sdk", "requires_python": ">=3.9", "version": "0.8.3", "yanked": false}}
{"info": {"classifiers": ["Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8"], "name": "pure-eval", "requires_python": "", "version": "0.0.3", "yanked": false}}
{"info": {"classifiers": ["Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9"], "name": "pure-eval", "requires_python": null, "version": "0.2.3", "yanked": false}}
{"info": {"classifiers": ["Development Status :: 5 - Production/Stable", "Framework :: Pydantic", "Framework :: Pydantic :: 2", "Intended Audience :: Developers", "Intended Audience :: Information Technology", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Topic :: Internet", "Topic :: Scientific/Engineering :: Artificial Intelligence", "Topic :: Software Development :: Libraries :: Python Modules"], "name": "pydantic-ai", "requires_python": ">=3.10", "version": "1.0.17", "yanked": false}}
{"info": {"classifiers": ["Development Status :: 3 - Alpha", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "Operating System :: POSIX", "Programming Language :: Python", "Topic :: Database"], "name": "pymongo", "requires_python": null, "version": "0.6", "yanked": false}}
{"info": {"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "Operating System :: POSIX", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.4", "Programming Language :: Python :: 2.5", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.1", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: Jython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Database"], "name": "pymongo", "requires_python": null, "version": "2.8.1", "yanked": false}}
{"info": {"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "Operating System :: POSIX", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Database"], "name": "pymongo", "requires_python": "", "version": "3.13.0", "yanked": false}}
1 change: 1 addition & 0 deletions scripts/split_tox_gh_actions/split_tox_gh_actions.py
@@ -81,6 +81,7 @@
"openai-base",
"openai-notiktoken",
"openai_agents",
"pydantic_ai",
],
"Cloud": [
"aws_lambda",
1 change: 1 addition & 0 deletions sentry_sdk/integrations/__init__.py
@@ -152,6 +152,7 @@ def iter_default_integrations(with_auto_enabling_integrations):
"openai": (1, 0, 0),
"openai_agents": (0, 0, 19),
"openfeature": (0, 7, 1),
"pydantic_ai": (1, 0, 0),
Bug: AI Integration Auto-Enablement Issue

The PydanticAIIntegration was added to _MIN_VERSIONS but is missing from _AUTO_ENABLING_INTEGRATIONS. This prevents the integration from being enabled automatically when pydantic_ai is installed, which is inconsistent with the other AI integrations and with its auto.ai.pydantic_ai origin.
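A minimal sketch of the suggested fix, assuming _AUTO_ENABLING_INTEGRATIONS in sentry_sdk/integrations/__init__.py lists integrations by dotted path as it does for the other AI integrations (the neighboring entry shown here is an assumption):

_AUTO_ENABLING_INTEGRATIONS = [
    # ... existing entries ...
    "sentry_sdk.integrations.openai.OpenAIIntegration",  # assumed existing entry
    "sentry_sdk.integrations.pydantic_ai.PydanticAIIntegration",  # proposed addition
    # ... existing entries ...
]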


"quart": (0, 16, 0),
"ray": (2, 7, 0),
"requests": (2, 0, 0),
47 changes: 47 additions & 0 deletions sentry_sdk/integrations/pydantic_ai/__init__.py
@@ -0,0 +1,47 @@
from sentry_sdk.integrations import DidNotEnable, Integration

from .patches import (
_patch_agent_run,
_patch_graph_nodes,
_patch_model_request,
_patch_tool_execution,
)

try:
import pydantic_ai

except ImportError:
raise DidNotEnable("pydantic-ai not installed")


class PydanticAIIntegration(Integration):
identifier = "pydantic_ai"
origin = f"auto.ai.{identifier}"

def __init__(self, include_prompts=True):
# type: (bool) -> None
"""
Initialize the Pydantic AI integration.

Args:
include_prompts: Whether to include prompts and messages in span data.
Requires send_default_pii=True. Defaults to True.
"""
self.include_prompts = include_prompts

@staticmethod
def setup_once():
# type: () -> None
"""
Set up the pydantic-ai integration.

This patches the key methods in pydantic-ai to create Sentry spans for:
- Agent workflow execution (root span)
- Individual agent invocations
- Model requests (AI client calls)
- Tool executions
"""
_patch_agent_run()
_patch_graph_nodes()
_patch_model_request()
_patch_tool_execution()
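
For reviewers, a minimal usage sketch of how this integration would be enabled once released; the DSN, model name, and prompt are placeholders, and the pydantic-ai calls (Agent, run_sync, result.output) are assumed from that library's public API:

import sentry_sdk
from sentry_sdk.integrations.pydantic_ai import PydanticAIIntegration
from pydantic_ai import Agent

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    traces_sample_rate=1.0,
    send_default_pii=True,  # required for include_prompts to take effect
    integrations=[PydanticAIIntegration(include_prompts=True)],
)

agent = Agent("openai:gpt-4o")  # placeholder model name
result = agent.run_sync("What is the capital of France?")
print(result.output)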
1 change: 1 addition & 0 deletions sentry_sdk/integrations/pydantic_ai/consts.py
@@ -0,0 +1 @@
SPAN_ORIGIN = "auto.ai.pydantic_ai"
4 changes: 4 additions & 0 deletions sentry_sdk/integrations/pydantic_ai/patches/__init__.py
@@ -0,0 +1,4 @@
from .agent_run import _patch_agent_run # noqa: F401
from .graph_nodes import _patch_graph_nodes # noqa: F401
from .model_request import _patch_model_request # noqa: F401
from .tools import _patch_tool_execution # noqa: F401
221 changes: 221 additions & 0 deletions sentry_sdk/integrations/pydantic_ai/patches/agent_run.py
@@ -0,0 +1,221 @@
from functools import wraps

import sentry_sdk
from sentry_sdk.integrations import DidNotEnable

from ..spans import agent_workflow_span, invoke_agent_span, update_invoke_agent_span
from ..utils import _capture_exception

from typing import TYPE_CHECKING

if TYPE_CHECKING:
from typing import Any, Callable, Optional

try:
import pydantic_ai
from pydantic_ai.agent import Agent
except ImportError:
raise DidNotEnable("pydantic-ai not installed")


class _StreamingContextManagerWrapper:
"""Wrapper for streaming methods that return async context managers."""

def __init__(self, agent, original_ctx_manager, is_streaming=True):
# type: (Any, Any, bool) -> None
self.agent = agent
self.original_ctx_manager = original_ctx_manager
self.is_streaming = is_streaming
self._isolation_scope = None # type: Any
self._workflow_span = None # type: Optional[sentry_sdk.tracing.Span]

async def __aenter__(self):
# type: () -> Any
# Set up isolation scope and workflow span
self._isolation_scope = sentry_sdk.isolation_scope()
self._isolation_scope.__enter__()

# Store agent reference and streaming flag
sentry_sdk.get_current_scope().set_context(
"pydantic_ai_agent", {"_agent": self.agent, "_streaming": self.is_streaming}
)

# Create workflow span
self._workflow_span = agent_workflow_span(self.agent)
self._workflow_span.__enter__()

# Enter the original context manager
result = await self.original_ctx_manager.__aenter__()
return result

async def __aexit__(self, exc_type, exc_val, exc_tb):
# type: (Any, Any, Any) -> None
try:
# Exit the original context manager first
await self.original_ctx_manager.__aexit__(exc_type, exc_val, exc_tb)
finally:
# Clean up workflow span
if self._workflow_span:
self._workflow_span.__exit__(exc_type, exc_val, exc_tb)

# Clean up isolation scope
if self._isolation_scope:
self._isolation_scope.__exit__(exc_type, exc_val, exc_tb)


def _create_run_wrapper(original_func, is_streaming=False):
# type: (Callable[..., Any], bool) -> Callable[..., Any]
"""
Wraps the Agent.run method to create a root span for the agent workflow.
Args:
original_func: The original run method
is_streaming: Whether this is a streaming method (for future use)
"""

@wraps(original_func)
async def wrapper(self, *args, **kwargs):
# type: (Any, *Any, **Any) -> Any
# Isolate each workflow so that when agents are run in asyncio tasks they
# don't touch each other's scopes
with sentry_sdk.isolation_scope():
# Store agent reference and streaming flag in Sentry scope for access in nested spans
# We store the full agent to allow access to tools and system prompts
sentry_sdk.get_current_scope().set_context(
"pydantic_ai_agent", {"_agent": self, "_streaming": is_streaming}
)

with agent_workflow_span(self):
result = None
try:
result = await original_func(self, *args, **kwargs)
return result
except Exception as exc:
_capture_exception(exc)

# It could be that there is an "invoke agent" span still open
current_span = sentry_sdk.get_current_span()
if current_span is not None and current_span.timestamp is None:
current_span.__exit__(None, None, None)

raise exc from None

return wrapper


def _create_run_sync_wrapper(original_func):
# type: (Callable[..., Any]) -> Callable[..., Any]
"""
Wraps the Agent.run_sync method to create a root span for the agent workflow.
Note: run_sync is always non-streaming.
"""

@wraps(original_func)
def wrapper(self, *args, **kwargs):
# type: (Any, *Any, **Any) -> Any
# Isolate each workflow so that when agents are run they
# don't touch each other's scopes
with sentry_sdk.isolation_scope():
# Store agent reference and streaming flag in Sentry scope for access in nested spans
# We store the full agent to allow access to tools and system prompts
sentry_sdk.get_current_scope().set_context(
"pydantic_ai_agent", {"_agent": self, "_streaming": False}
)

with agent_workflow_span(self):
result = None
try:
result = original_func(self, *args, **kwargs)
return result
except Exception as exc:
_capture_exception(exc)

# It could be that there is an "invoke agent" span still open
current_span = sentry_sdk.get_current_span()
if current_span is not None and current_span.timestamp is None:
current_span.__exit__(None, None, None)

raise exc from None

return wrapper


def _create_streaming_wrapper(original_func):
# type: (Callable[..., Any]) -> Callable[..., Any]
"""
Wraps the run_stream method, which returns an async context manager.
"""

@wraps(original_func)
def wrapper(self, *args, **kwargs):
# type: (Any, *Any, **Any) -> Any
# Call original function to get the context manager
original_ctx_manager = original_func(self, *args, **kwargs)

# Wrap it with our instrumentation
return _StreamingContextManagerWrapper(
agent=self, original_ctx_manager=original_ctx_manager, is_streaming=True
)

return wrapper


def _create_streaming_events_wrapper(original_func):
# type: (Callable[..., Any]) -> Callable[..., Any]
"""
Wraps the run_stream_events method, which returns an async generator of events.
"""

@wraps(original_func)
async def wrapper(self, *args, **kwargs):
# type: (Any, *Any, **Any) -> Any
# Isolate each workflow so that when agents are run in asyncio tasks they
# don't touch each other's scopes
with sentry_sdk.isolation_scope():
# Store agent reference and streaming flag in Sentry scope for access in nested spans
sentry_sdk.get_current_scope().set_context(
"pydantic_ai_agent", {"_agent": self, "_streaming": True}
)

with agent_workflow_span(self):
try:
# Call the original generator and yield all events
async for event in original_func(self, *args, **kwargs):
yield event
except Exception as exc:
_capture_exception(exc)

# It could be that there is an "invoke agent" span still open
current_span = sentry_sdk.get_current_span()
if current_span is not None and current_span.timestamp is None:
current_span.__exit__(None, None, None)

raise exc from None

return wrapper


def _patch_agent_run():
# type: () -> None
"""
Patches the Agent run methods to create spans for agent execution.
This patches both non-streaming (run, run_sync) and streaming
(run_stream, run_stream_events) methods.
"""

# Store original methods
original_run = Agent.run
original_run_sync = Agent.run_sync
original_run_stream = Agent.run_stream
original_run_stream_events = Agent.run_stream_events

# Wrap and apply patches for non-streaming methods
Agent.run = _create_run_wrapper(original_run, is_streaming=False) # type: ignore
Agent.run_sync = _create_run_sync_wrapper(original_run_sync) # type: ignore

# Wrap and apply patches for streaming methods
Agent.run_stream = _create_streaming_wrapper(original_run_stream) # type: ignore
Agent.run_stream_events = _create_streaming_events_wrapper( # type: ignore[method-assign]
original_run_stream_events
)
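
A short sketch of the streaming path that _StreamingContextManagerWrapper instruments; it assumes pydantic-ai's run_stream async context manager and its stream_text() iterator on the result (treat both as assumptions about the library's public API):

import asyncio
from pydantic_ai import Agent

agent = Agent("openai:gpt-4o")  # placeholder model name

async def main():
    # After patching, run_stream returns _StreamingContextManagerWrapper, so the
    # workflow span opens in __aenter__ and closes in __aexit__.
    async with agent.run_stream("Tell me a short joke") as result:
        async for chunk in result.stream_text():
            print(chunk)

asyncio.run(main())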