Commit e6549cf

Add instrument_google_genai (#1217)

1 parent 46a9f39 commit e6549cf
12 files changed: +441 −14 lines changed

logfire-api/logfire_api/__init__.py

Lines changed: 3 additions & 0 deletions

@@ -146,6 +146,8 @@ def instrument_openai(self, *args, **kwargs) -> ContextManager[None]:

    def instrument_openai_agents(self, *args, **kwargs) -> None: ...

+    def instrument_google_genai(self, *args, **kwargs) -> None: ...
+
    def instrument_aiohttp_client(self, *args, **kwargs) -> None: ...

    def instrument_aiohttp_server(self, *args, **kwargs) -> None: ...

@@ -182,6 +184,7 @@ def shutdown(self, *args, **kwargs) -> None: ...
instrument_openai = DEFAULT_LOGFIRE_INSTANCE.instrument_openai
instrument_openai_agents = DEFAULT_LOGFIRE_INSTANCE.instrument_openai_agents
instrument_anthropic = DEFAULT_LOGFIRE_INSTANCE.instrument_anthropic
+instrument_google_genai = DEFAULT_LOGFIRE_INSTANCE.instrument_google_genai
instrument_asyncpg = DEFAULT_LOGFIRE_INSTANCE.instrument_asyncpg
instrument_celery = DEFAULT_LOGFIRE_INSTANCE.instrument_celery
instrument_httpx = DEFAULT_LOGFIRE_INSTANCE.instrument_httpx
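
For context, logfire-api mirrors the Logfire API with no-op fallbacks, so code can call the new method unconditionally whether or not the real package is installed. A minimal sketch of that pattern (behaviour assumed from the package's stated purpose, not shown in this diff):

import logfire_api as logfire  # resolves to the real logfire package when it is installed

logfire.instrument_google_genai()  # otherwise this is a harmless no-op stub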

logfire/__init__.py

Lines changed: 2 additions & 0 deletions

@@ -34,6 +34,7 @@
instrument_openai = DEFAULT_LOGFIRE_INSTANCE.instrument_openai
instrument_openai_agents = DEFAULT_LOGFIRE_INSTANCE.instrument_openai_agents
instrument_anthropic = DEFAULT_LOGFIRE_INSTANCE.instrument_anthropic
+instrument_google_genai = DEFAULT_LOGFIRE_INSTANCE.instrument_google_genai
instrument_asyncpg = DEFAULT_LOGFIRE_INSTANCE.instrument_asyncpg
instrument_httpx = DEFAULT_LOGFIRE_INSTANCE.instrument_httpx
instrument_celery = DEFAULT_LOGFIRE_INSTANCE.instrument_celery

@@ -125,6 +126,7 @@ def loguru_handler() -> Any:
    'instrument_openai',
    'instrument_openai_agents',
    'instrument_anthropic',
+    'instrument_google_genai',
    'instrument_asyncpg',
    'instrument_httpx',
    'instrument_celery',

logfire/_internal/exporters/processor_wrapper.py

Lines changed: 49 additions & 10 deletions

@@ -7,7 +7,7 @@
from urllib.parse import parse_qs, urlparse

from opentelemetry import context
-from opentelemetry.sdk.trace import ReadableSpan, Span
+from opentelemetry.sdk.trace import Event, ReadableSpan, Span
from opentelemetry.sdk.util.instrumentation import InstrumentationScope
from opentelemetry.trace import SpanKind, Status, StatusCode

@@ -26,6 +26,7 @@
from ..scrubbing import BaseScrubber
from ..utils import (
    ReadableSpanDict,
+    handle_internal_errors,
    is_asgi_send_receive_span_name,
    is_instrumentation_suppressed,
    span_to_dict,

@@ -72,15 +73,18 @@ def on_start(
        super().on_start(span, parent_context)

    def on_end(self, span: ReadableSpan) -> None:
-        span_dict = span_to_dict(span)
-        _tweak_asgi_send_receive_spans(span_dict)
-        _tweak_sqlalchemy_connect_spans(span_dict)
-        _tweak_http_spans(span_dict)
-        _summarize_db_statement(span_dict)
-        _set_error_level_and_status(span_dict)
-        _transform_langchain_span(span_dict)
-        self.scrubber.scrub_span(span_dict)
-        span = ReadableSpan(**span_dict)
+        with handle_internal_errors:
+            span_dict = span_to_dict(span)
+            _tweak_asgi_send_receive_spans(span_dict)
+            _tweak_sqlalchemy_connect_spans(span_dict)
+            _tweak_http_spans(span_dict)
+            _summarize_db_statement(span_dict)
+            _set_error_level_and_status(span_dict)
+            _transform_langchain_span(span_dict)
+            _transform_google_genai_span(span_dict)
+            _default_gen_ai_response_model(span_dict)
+            self.scrubber.scrub_span(span_dict)
+            span = ReadableSpan(**span_dict)
        super().on_end(span)

@@ -404,3 +408,38 @@ def _transform_langchain_message(old_message: dict[str, Any]) -> dict[str, Any]:
    if 'tool_call_id' in result:
        result['id'] = result.pop('tool_call_id')
    return result
+
+
+def _default_gen_ai_response_model(span: ReadableSpanDict):
+    attrs = span['attributes']
+    if 'gen_ai.request.model' in attrs and 'gen_ai.response.model' not in attrs:
+        span['attributes'] = {
+            **attrs,
+            'gen_ai.response.model': attrs['gen_ai.request.model'],
+        }
+
+
+def _transform_google_genai_span(span: ReadableSpanDict):
+    scope = span['instrumentation_scope']
+    if not (scope and scope.name == 'opentelemetry.instrumentation.google_genai'):
+        return
+
+    new_events: list[Event] = []
+    events_attr: list[dict[str, Any]] = []
+    for event in span['events']:
+        if not (
+            event.name.startswith('gen_ai.')
+            and event.attributes
+            and isinstance(event_attrs_string := event.attributes.get('event_body'), str)
+        ):  # pragma: no cover
+            new_events.append(event)
+            continue
+        event_attrs: dict[str, Any] = json.loads(event_attrs_string)
+        events_attr.append(event_attrs)
+    span['attributes'] = {
+        **span['attributes'],
+        'events': json.dumps(events_attr),
+        'gen_ai.operation.name': 'chat',
+        ATTRIBUTES_JSON_SCHEMA_KEY: attributes_json_schema(JsonSchemaProperties({'events': {'type': 'array'}})),
+    }
+    span['events'] = new_events
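
To make the `_default_gen_ai_response_model` fallback concrete, here is a hedged, standalone sketch of the same logic applied to a plain attribute dict (the sample values are invented for illustration, not taken from the diff):

# Hypothetical span attributes as the exporter might see them before the tweak.
attrs = {'gen_ai.request.model': 'gemini-2.0-flash-001', 'gen_ai.system': 'gemini'}

# Mirror of the fallback above: if the instrumentation never recorded a response model,
# reuse the requested model so downstream views always have one.
if 'gen_ai.request.model' in attrs and 'gen_ai.response.model' not in attrs:
    attrs = {**attrs, 'gen_ai.response.model': attrs['gen_ai.request.model']}

assert attrs['gen_ai.response.model'] == 'gemini-2.0-flash-001'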
logfire/_internal/integrations/google_genai.py (new file)

Lines changed: 45 additions & 0 deletions

@@ -0,0 +1,45 @@
from __future__ import annotations

import json
from typing import Any

from opentelemetry._events import Event, EventLogger, EventLoggerProvider
from opentelemetry.instrumentation.google_genai import GoogleGenAiSdkInstrumentor
from opentelemetry.trace import get_current_span

import logfire
from logfire._internal.utils import handle_internal_errors


class SpanEventLogger(EventLogger):
    @handle_internal_errors
    def emit(self, event: Event) -> None:
        span = get_current_span()
        assert isinstance(event.body, dict)
        body: dict[str, Any] = {**event.body}
        if event.name == 'gen_ai.choice':
            parts = body.pop('content')['parts']
            new_parts: list[dict[str, Any] | str] = []
            for part in parts:
                new_part: str | dict[str, Any] = {k: v for k, v in part.items() if v is not None}
                if list(new_part.keys()) == ['text']:  # pragma: no branch
                    new_part = new_part['text']
                new_parts.append(new_part)
            body['message'] = {'role': 'assistant', 'content': new_parts}
        else:
            body['role'] = body.get('role', event.name.split('.')[1])

        span.add_event(event.name, attributes={'event_body': json.dumps(body)})


class SpanEventLoggerProvider(EventLoggerProvider):
    def get_event_logger(self, *args: Any, **kwargs: Any) -> SpanEventLogger:
        return SpanEventLogger(*args, **kwargs)


def instrument_google_genai(logfire_instance: logfire.Logfire):
    GoogleGenAiSdkInstrumentor().instrument(
        event_logger_provider=SpanEventLoggerProvider(),
        tracer_provider=logfire_instance.config.get_tracer_provider(),
        meter_provider=logfire_instance.config.get_meter_provider(),
    )
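
To illustrate what `SpanEventLogger.emit` does with a `gen_ai.choice` event, here is a hedged, standalone sketch of the same reshaping applied to a hand-written event body (the sample data is invented for illustration):

# Hypothetical event body resembling what the upstream instrumentation emits for 'gen_ai.choice'.
body = {'index': 0, 'finish_reason': 'STOP', 'content': {'parts': [{'text': 'rainy', 'function_call': None}]}}

parts = body.pop('content')['parts']
new_parts = []
for part in parts:
    new_part = {k: v for k, v in part.items() if v is not None}  # drop empty fields
    if list(new_part.keys()) == ['text']:
        new_part = new_part['text']  # collapse text-only parts to a plain string
    new_parts.append(new_part)
body['message'] = {'role': 'assistant', 'content': new_parts}

# body is now {'index': 0, 'finish_reason': 'STOP', 'message': {'role': 'assistant', 'content': ['rainy']}}
# and would be attached to the current span as a JSON-encoded 'event_body' attribute.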

logfire/_internal/main.py

Lines changed: 6 additions & 0 deletions

@@ -1264,6 +1264,12 @@ def instrument_anthropic(
            is_async_client,
        )

+    def instrument_google_genai(self):
+        from .integrations.google_genai import instrument_google_genai
+
+        self._warn_if_not_initialized_for_instrumentation()
+        instrument_google_genai(self)
+
    def instrument_asyncpg(self, **kwargs: Any) -> None:
        """Instrument the `asyncpg` module so that spans are automatically created for each query."""
        from .integrations.asyncpg import instrument_asyncpg
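
A hedged usage sketch of the new method (the model name, prompt, and google-genai client calls are assumptions based on that SDK's public API, not part of this diff):

import logfire
from google import genai  # assumption: the google-genai SDK added to the dev dependencies below

logfire.configure()
logfire.instrument_google_genai()  # registers GoogleGenAiSdkInstrumentor with Logfire's providers

# Assumes GOOGLE_API_KEY (or GEMINI_API_KEY) is set in the environment.
client = genai.Client()
response = client.models.generate_content(
    model='gemini-2.0-flash-001',  # placeholder model, matching the recorded cassette further down
    contents='What is the weather like in Boston?',
)
print(response.text)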

pyproject.toml

Lines changed: 2 additions & 0 deletions

@@ -174,6 +174,8 @@ dev = [
    "langchain>=0.0.27",
    "langchain-openai>=0.3.17",
    "langgraph >= 0",
+    "opentelemetry-instrumentation-google-genai >= 0",
+    "google-genai >= 0",
]
docs = [
    "black>=23.12.0",

tests/conftest.py

Lines changed: 1 addition & 1 deletion

@@ -175,4 +175,4 @@ def multiple_credentials(tmp_path: Path) -> Path:

@pytest.fixture(scope='module')
def vcr_config():
-    return {'filter_headers': ['authorization', 'cookie', 'Set-Cookie']}
+    return {'filter_headers': ['authorization', 'cookie', 'Set-Cookie', 'x-goog-api-key']}
Lines changed: 132 additions & 0 deletions

@@ -0,0 +1,132 @@
interactions:
- request:
    body: '{"contents": [{"parts": [{"text": "What is the weather like in Boston?"}],
      "role": "user"}], "tools": [{"functionDeclarations": [{"description": "Returns
      the current weather.\n\nArgs:\n location: The city and state, e.g. San Francisco,
      CA", "name": "get_current_weather", "parameters": {"properties": {"location":
      {"type": "STRING"}}, "required": ["location"], "type": "OBJECT"}}]}], "generationConfig":
      {}}'
    headers:
      accept:
      - '*/*'
      accept-encoding:
      - gzip, deflate, zstd
      connection:
      - keep-alive
      content-length:
      - '408'
      content-type:
      - application/json
      host:
      - generativelanguage.googleapis.com
      user-agent:
      - google-genai-sdk/1.23.0 gl-python/3.12.6
      x-goog-api-client:
      - google-genai-sdk/1.23.0 gl-python/3.12.6
    method: POST
    uri: https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash-001:generateContent
  response:
    body:
      string: !!binary |
        H4sIAAAAAAAC/61SXU+DMBR951eQPq8LsLGJb3Mao3FxUTQmxixXuGNkpSVt0Zhl/93ytcHUN/tA
        yj2n59zb051l2yQCHqcxaFTk3H41FdveVd8SE1wj1wZoS6aYg9RHbr12nb2hrAse6VTwOTDWO9zg
        HDI0dZKgXkWFlMZj9YmgNyjJ4JQMMlG/iBiEiQhKm1LqQigt+MBezMgJcW/99Xfcvx1diRSsai4T
        MbJWbN8SyDrlqdo8IKja+DG8Xx6aJvCR3Ikkl+K97Jn6w4k39gL/bDTyXMf1nSlSZ2K15pUtKRQk
        uEANJgU4TEqMSJbrUGyRz0VRpTD2aqNOaD08aGAtNLAe4ruDH6rq0nimrJtlJ2YzPrBUf5Uzhlcv
        YScYo99rqr0jq3OVpy3+k1nQ97KaZOqwnlGq5jkkmJmcqDd06JqB2lDHcStRIlHlgiu8iUsenekt
        3I7m1zTLpnG0zCO6UrMnYu2tb2/EfFweAwAA
    headers:
      Alt-Svc:
      - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000
      Content-Encoding:
      - gzip
      Content-Type:
      - application/json; charset=UTF-8
      Date:
      - Tue, 01 Jul 2025 16:25:28 GMT
      Server:
      - scaffolding on HTTPServer2
      Server-Timing:
      - gfet4t7; dur=1472
      Transfer-Encoding:
      - chunked
      Vary:
      - Origin
      - X-Origin
      - Referer
      X-Content-Type-Options:
      - nosniff
      X-Frame-Options:
      - SAMEORIGIN
      X-XSS-Protection:
      - '0'
    status:
      code: 200
      message: OK
- request:
    body: '{"contents": [{"parts": [{"text": "What is the weather like in Boston?"}],
      "role": "user"}, {"parts": [{"functionCall": {"args": {"location": "Boston,
      MA"}, "name": "get_current_weather"}}], "role": "model"}, {"parts": [{"functionResponse":
      {"name": "get_current_weather", "response": {"result": "rainy"}}}], "role":
      "user"}], "tools": [{"functionDeclarations": [{"description": "Returns the current
      weather.\n\nArgs:\n location: The city and state, e.g. San Francisco, CA",
      "name": "get_current_weather", "parameters": {"properties": {"location": {"type":
      "STRING"}}, "required": ["location"], "type": "OBJECT"}}]}], "generationConfig":
      {}}'
    headers:
      accept:
      - '*/*'
      accept-encoding:
      - gzip, deflate, zstd
      connection:
      - keep-alive
      content-length:
      - '642'
      content-type:
      - application/json
      host:
      - generativelanguage.googleapis.com
      user-agent:
      - google-genai-sdk/1.23.0 gl-python/3.12.6
      x-goog-api-client:
      - google-genai-sdk/1.23.0 gl-python/3.12.6
    method: POST
    uri: https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash-001:generateContent
  response:
    body:
      string: !!binary |
        H4sIAAAAAAAC/61RTU/DMAy991dEOa9Tu+9yGxtClTYYUCEk4GDWrIvaJVXjIsa0/07SrlsKV3KI
        LL9nP/v54BBC1yBiHgMyRa/Iq84Qcqh+g0mBTKAGmpRO5lDghVu/gxVrCrIvU0RDJFyRArjYEy7I
        tVQoRYcsp903Qa2S4zl+71yECpkx02UnY5Y19GNDoBsuuNo+MlBSGNpTdL+iZxQ+k4VM8kJ+mFld
        r+v5g/Gk3w+Go5HfG0x8z3ca6UqUlgoStmQI2gw4r0x1i12OkUyZmMmyMmM4qWUs71p4cIJRImQt
        ZDTu/Omq5lqTZ7alltt6ecg47s2G0c1LRC2DsD1U45BjGfl7xH8SC9pazuku9ameWaF4fZOE7fSV
        3F7XczcZqK3reX7VlBZM5VIoFsaG5z5gCov57Swso3G8Xqm78rucptQ5Oj82IczLpQIAAA==
    headers:
      Alt-Svc:
      - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000
      Content-Encoding:
      - gzip
      Content-Type:
      - application/json; charset=UTF-8
      Date:
      - Tue, 01 Jul 2025 16:25:29 GMT
      Server:
      - scaffolding on HTTPServer2
      Server-Timing:
      - gfet4t7; dur=500
      Transfer-Encoding:
      - chunked
      Vary:
      - Origin
      - X-Origin
      - Referer
      X-Content-Type-Options:
      - nosniff
      X-Frame-Options:
      - SAMEORIGIN
      X-XSS-Protection:
      - '0'
    status:
      code: 200
      message: OK
version: 1
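
For orientation only, a hedged sketch of the kind of pytest-recording test that might replay a cassette like this one (the test name, tool function, and assertion are invented for illustration; the google-genai tool-calling API is assumed from that SDK's documentation):

import pytest
from google import genai
from google.genai import types

import logfire


def get_current_weather(location: str) -> str:
    """Returns the current weather.

    Args:
        location: The city and state, e.g. San Francisco, CA
    """
    return 'rainy'


@pytest.mark.vcr()  # pytest-recording replays the recorded interactions instead of hitting the API
def test_google_genai_tool_call() -> None:
    logfire.configure(send_to_logfire=False)
    logfire.instrument_google_genai()
    client = genai.Client(api_key='test-key')  # dummy key; the real one is filtered from cassettes
    response = client.models.generate_content(
        model='gemini-2.0-flash-001',
        contents='What is the weather like in Boston?',
        config=types.GenerateContentConfig(tools=[get_current_weather]),
    )
    assert response.text  # final answer produced after the automatic function-call round trip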
