Skip to content

Commit e265d62

Browse files
committed
feat(ai): Add python-genai integration
1 parent 87f8f39 commit e265d62

File tree

10 files changed

+2119
-0
lines changed

10 files changed

+2119
-0
lines changed

pyproject.toml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -118,6 +118,10 @@ ignore_missing_imports = true
118118
module = "langgraph.*"
119119
ignore_missing_imports = true
120120

121+
[[tool.mypy.overrides]]
122+
module = "google.genai.*"
123+
ignore_missing_imports = true
124+
121125
[[tool.mypy.overrides]]
122126
module = "executing.*"
123127
ignore_missing_imports = true

scripts/populate_tox/config.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -143,6 +143,13 @@
143143
"package": "gql[all]",
144144
"num_versions": 2,
145145
},
146+
"google_genai": {
147+
"package": "google-genai",
148+
"deps": {
149+
"*": ["pytest-asyncio"],
150+
},
151+
"python": ">=3.8",
152+
},
146153
"graphene": {
147154
"package": "graphene",
148155
"deps": {

sentry_sdk/integrations/__init__.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -91,6 +91,7 @@ def iter_default_integrations(with_auto_enabling_integrations):
9191
"sentry_sdk.integrations.flask.FlaskIntegration",
9292
"sentry_sdk.integrations.gql.GQLIntegration",
9393
"sentry_sdk.integrations.graphene.GrapheneIntegration",
94+
"sentry_sdk.integrations.google_genai.GoogleGenAIIntegration",
9495
"sentry_sdk.integrations.httpx.HttpxIntegration",
9596
"sentry_sdk.integrations.huey.HueyIntegration",
9697
"sentry_sdk.integrations.huggingface_hub.HuggingfaceHubIntegration",
@@ -140,6 +141,7 @@ def iter_default_integrations(with_auto_enabling_integrations):
140141
"flask": (1, 1, 4),
141142
"gql": (3, 4, 1),
142143
"graphene": (3, 3),
144+
"google_genai": (1, 0, 0), # google-genai
143145
"grpc": (1, 32, 0), # grpcio
144146
"httpx": (0, 16, 0),
145147
"huggingface_hub": (0, 24, 7),
Lines changed: 290 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,290 @@
1+
from functools import wraps
2+
from typing import (
3+
Any,
4+
AsyncIterator,
5+
Callable,
6+
Iterator,
7+
List,
8+
)
9+
10+
import sentry_sdk
11+
from sentry_sdk.integrations import DidNotEnable, Integration
12+
from sentry_sdk.consts import OP, SPANDATA
13+
14+
15+
try:
16+
from google import genai
17+
from google.genai.models import Models, AsyncModels
18+
except ImportError:
19+
raise DidNotEnable("google-genai not installed")
20+
21+
22+
from .consts import IDENTIFIER, ORIGIN, GEN_AI_SYSTEM
23+
from .utils import (
24+
set_span_data_for_request,
25+
set_span_data_for_response,
26+
capture_exception,
27+
)
28+
from .streaming import (
29+
set_span_data_for_streaming_response,
30+
accumulate_streaming_response,
31+
prepare_generate_content_args,
32+
)
33+
34+
35+
class GoogleGenAIIntegration(Integration):
    """Auto-instrumentation for the ``google-genai`` SDK.

    Monkey-patches the sync and async ``generate_content`` /
    ``generate_content_stream`` entry points on ``Models`` / ``AsyncModels``
    so every call is traced with gen-AI spans.
    """

    identifier = IDENTIFIER
    origin = ORIGIN

    def __init__(self, include_prompts=True):
        # type: (GoogleGenAIIntegration, bool) -> None
        # Presumably gates whether prompt/response content is attached to
        # spans (PII control) — consumed by the helpers in utils/streaming.
        self.include_prompts = include_prompts

    @staticmethod
    def setup_once():
        # type: () -> None
        # Async entry points.
        AsyncModels.generate_content = _wrap_async_generate_content(
            AsyncModels.generate_content
        )
        AsyncModels.generate_content_stream = _wrap_async_generate_content_stream(
            AsyncModels.generate_content_stream
        )

        # Sync entry points.
        Models.generate_content = _wrap_generate_content(Models.generate_content)
        Models.generate_content_stream = _wrap_generate_content_stream(
            Models.generate_content_stream
        )
59+
60+
61+
def _wrap_generate_content_stream(f):
    # type: (Callable[..., Any]) -> Callable[..., Any]
    """Wrap the synchronous ``generate_content_stream`` method.

    Starts an ``invoke_agent`` span plus a nested ``chat`` span, then wraps
    the returned iterator so both spans stay open until the stream is
    exhausted (or closed); the accumulated chunks are then attached as
    response data and the spans are finished.
    """

    @wraps(f)
    def new_generate_content_stream(self, *args, **kwargs):
        # type: (Any, Any, Any) -> Any
        integration = sentry_sdk.get_client().get_integration(GoogleGenAIIntegration)
        if integration is None:
            # Integration disabled: call through untouched.
            return f(self, *args, **kwargs)

        # The model object itself is unused; only its name goes on the spans.
        _model, contents, model_name = prepare_generate_content_args(args, kwargs)

        # Spans are started manually (not as context managers) because they
        # must outlive this call and be finished by the iterator wrapper below.
        span = sentry_sdk.start_span(
            op=OP.GEN_AI_INVOKE_AGENT,
            name="invoke_agent",
            origin=ORIGIN,
        )
        span.set_data(SPANDATA.GEN_AI_AGENT_NAME, model_name)
        span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "invoke_agent")
        set_span_data_for_request(span, integration, model_name, contents, kwargs)
        span.set_data(SPANDATA.GEN_AI_RESPONSE_STREAMING, True)

        chat_span = sentry_sdk.start_span(
            op=OP.GEN_AI_CHAT,
            name=f"chat {model_name}",
            origin=ORIGIN,
        )
        chat_span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "chat")
        chat_span.set_data(SPANDATA.GEN_AI_SYSTEM, GEN_AI_SYSTEM)
        chat_span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model_name)
        set_span_data_for_request(chat_span, integration, model_name, contents, kwargs)
        chat_span.set_data(SPANDATA.GEN_AI_RESPONSE_STREAMING, True)

        try:
            stream = f(self, *args, **kwargs)

            # Create wrapper iterator to accumulate responses
            def new_iterator():
                # type: () -> Iterator[Any]
                chunks = []  # type: List[Any]
                try:
                    for chunk in stream:
                        chunks.append(chunk)
                        yield chunk
                except Exception as exc:
                    capture_exception(exc)
                    raise
                finally:
                    # Accumulate all chunks and set final response data on spans
                    # (runs on normal exhaustion, error, or generator close).
                    if chunks:
                        accumulated_response = accumulate_streaming_response(chunks)
                        set_span_data_for_streaming_response(
                            chat_span, integration, accumulated_response
                        )
                        set_span_data_for_streaming_response(
                            span, integration, accumulated_response
                        )
                    # Finish in reverse start order: inner chat span first.
                    chat_span.finish()
                    span.finish()

            return new_iterator()

        except Exception as exc:
            # The initial call itself failed — the iterator wrapper will never
            # run, so the spans must be finished here.
            capture_exception(exc)
            chat_span.finish()
            span.finish()
            raise

    return new_generate_content_stream
129+
130+
131+
def _wrap_async_generate_content_stream(f):
    # type: (Callable[..., Any]) -> Callable[..., Any]
    """Wrap the asynchronous ``generate_content_stream`` method.

    Async mirror of ``_wrap_generate_content_stream``: starts an
    ``invoke_agent`` span plus a nested ``chat`` span, wraps the returned
    async iterator, and finishes both spans once the stream ends.
    """

    @wraps(f)
    async def new_async_generate_content_stream(self, *args, **kwargs):
        # type: (Any, Any, Any) -> Any
        integration = sentry_sdk.get_client().get_integration(GoogleGenAIIntegration)
        if integration is None:
            # Integration disabled: call through untouched.
            return await f(self, *args, **kwargs)

        # The model object itself is unused; only its name goes on the spans.
        _model, contents, model_name = prepare_generate_content_args(args, kwargs)

        # Spans are started manually (not as context managers) because they
        # must outlive this call and be finished by the iterator wrapper below.
        span = sentry_sdk.start_span(
            op=OP.GEN_AI_INVOKE_AGENT,
            name="invoke_agent",
            origin=ORIGIN,
        )
        span.set_data(SPANDATA.GEN_AI_AGENT_NAME, model_name)
        span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "invoke_agent")
        set_span_data_for_request(span, integration, model_name, contents, kwargs)
        span.set_data(SPANDATA.GEN_AI_RESPONSE_STREAMING, True)

        chat_span = sentry_sdk.start_span(
            op=OP.GEN_AI_CHAT,
            name=f"chat {model_name}",
            origin=ORIGIN,
        )
        chat_span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "chat")
        chat_span.set_data(SPANDATA.GEN_AI_SYSTEM, GEN_AI_SYSTEM)
        chat_span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model_name)
        set_span_data_for_request(chat_span, integration, model_name, contents, kwargs)
        chat_span.set_data(SPANDATA.GEN_AI_RESPONSE_STREAMING, True)

        try:
            stream = await f(self, *args, **kwargs)

            # Create wrapper async iterator to accumulate responses
            async def new_async_iterator():
                # type: () -> AsyncIterator[Any]
                chunks = []  # type: List[Any]
                try:
                    async for chunk in stream:
                        chunks.append(chunk)
                        yield chunk
                except Exception as exc:
                    capture_exception(exc)
                    raise
                finally:
                    # Accumulate all chunks and set final response data on spans
                    # (runs on normal exhaustion, error, or generator close).
                    if chunks:
                        accumulated_response = accumulate_streaming_response(chunks)
                        set_span_data_for_streaming_response(
                            chat_span, integration, accumulated_response
                        )
                        set_span_data_for_streaming_response(
                            span, integration, accumulated_response
                        )
                    # Finish in reverse start order: inner chat span first.
                    chat_span.finish()
                    span.finish()

            return new_async_iterator()

        except Exception as exc:
            # The initial awaited call failed — the iterator wrapper will
            # never run, so the spans must be finished here.
            capture_exception(exc)
            chat_span.finish()
            span.finish()
            raise

    return new_async_generate_content_stream
199+
200+
201+
def _wrap_generate_content(f):
    # type: (Callable[..., Any]) -> Callable[..., Any]
    """Wrap the synchronous (non-streaming) ``generate_content`` method.

    Traces the call with an ``invoke_agent`` span and a nested ``chat`` span;
    request and response data are attached to both. Exceptions are captured
    and re-raised, and the context managers close the spans on every path.
    """

    @wraps(f)
    def new_generate_content(self, *args, **kwargs):
        # type: (Any, Any, Any) -> Any
        integration = sentry_sdk.get_client().get_integration(GoogleGenAIIntegration)
        if integration is None:
            # Integration disabled: call through untouched.
            return f(self, *args, **kwargs)

        # Named `_model` for consistency with the streaming wrappers: the
        # model object itself is unused, only its name goes on the spans.
        _model, contents, model_name = prepare_generate_content_args(args, kwargs)

        with sentry_sdk.start_span(
            op=OP.GEN_AI_INVOKE_AGENT,
            name="invoke_agent",
            origin=ORIGIN,
        ) as span:
            span.set_data(SPANDATA.GEN_AI_AGENT_NAME, model_name)
            span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "invoke_agent")
            set_span_data_for_request(span, integration, model_name, contents, kwargs)

            try:
                with sentry_sdk.start_span(
                    op=OP.GEN_AI_CHAT,
                    name=f"chat {model_name}",
                    origin=ORIGIN,
                ) as chat_span:
                    chat_span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "chat")
                    chat_span.set_data(SPANDATA.GEN_AI_SYSTEM, GEN_AI_SYSTEM)
                    chat_span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model_name)
                    set_span_data_for_request(
                        chat_span, integration, model_name, contents, kwargs
                    )

                    response = f(self, *args, **kwargs)

                    set_span_data_for_response(chat_span, integration, response)
                    set_span_data_for_response(span, integration, response)

                    return response
            except Exception as exc:
                capture_exception(exc)
                raise

    return new_generate_content
245+
246+
247+
def _wrap_async_generate_content(f):
    # type: (Callable[..., Any]) -> Callable[..., Any]
    """Wrap the asynchronous (non-streaming) ``generate_content`` method.

    Async mirror of ``_wrap_generate_content``: traces the awaited call with
    an ``invoke_agent`` span and a nested ``chat`` span, captures and
    re-raises exceptions, and lets the context managers close the spans.
    """

    @wraps(f)
    async def new_async_generate_content(self, *args, **kwargs):
        # type: (Any, Any, Any) -> Any
        integration = sentry_sdk.get_client().get_integration(GoogleGenAIIntegration)
        if integration is None:
            # Integration disabled: call through untouched.
            return await f(self, *args, **kwargs)

        # Named `_model` for consistency with the streaming wrappers: the
        # model object itself is unused, only its name goes on the spans.
        _model, contents, model_name = prepare_generate_content_args(args, kwargs)

        with sentry_sdk.start_span(
            op=OP.GEN_AI_INVOKE_AGENT,
            name="invoke_agent",
            origin=ORIGIN,
        ) as span:
            span.set_data(SPANDATA.GEN_AI_AGENT_NAME, model_name)
            span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "invoke_agent")
            set_span_data_for_request(span, integration, model_name, contents, kwargs)

            try:
                with sentry_sdk.start_span(
                    op=OP.GEN_AI_CHAT,
                    name=f"chat {model_name}",
                    origin=ORIGIN,
                ) as chat_span:
                    chat_span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "chat")
                    chat_span.set_data(SPANDATA.GEN_AI_SYSTEM, GEN_AI_SYSTEM)
                    chat_span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model_name)
                    set_span_data_for_request(
                        chat_span, integration, model_name, contents, kwargs
                    )

                    response = await f(self, *args, **kwargs)

                    set_span_data_for_response(chat_span, integration, response)
                    set_span_data_for_response(span, integration, response)

                    return response
            except Exception as exc:
                capture_exception(exc)
                raise

    return new_async_generate_content
Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
# Constants shared by the Google GenAI integration modules.

IDENTIFIER = "google_genai"

# Span origin string derived from the integration identifier.
ORIGIN = f"auto.ai.{IDENTIFIER}"

# Value reported as the gen-AI system on spans.
GEN_AI_SYSTEM = "gcp.gemini"

# Mapping of tool attributes to their descriptions.
# These are all tools that are available in the Google GenAI API.
TOOL_ATTRIBUTES_MAP = {
    "google_search_retrieval": "Google Search retrieval tool",
    "google_search": "Google Search tool",
    "retrieval": "Retrieval tool",
    "enterprise_web_search": "Enterprise web search tool",
    "google_maps": "Google Maps tool",
    "code_execution": "Code execution tool",
    "computer_use": "Computer use tool",
}

0 commit comments

Comments
 (0)