
Commit 1c1ab3d

Uploader class for _ref events
WIP use newest iteration of semconv

1 parent 8ea69dc commit 1c1ab3d

File tree

7 files changed: +381 −110 lines changed

instrumentation-genai/opentelemetry-instrumentation-google-genai/pyproject.toml

Lines changed: 2 additions & 1 deletion
@@ -37,9 +37,10 @@ classifiers = [
   "Programming Language :: Python :: 3.12"
 ]
 dependencies = [
+  "fsspec>=2025.5.1",
   "opentelemetry-api >=1.31.1, <2",
   "opentelemetry-instrumentation >=0.52b1, <2",
-  "opentelemetry-semantic-conventions >=0.52b1, <2"
+  "opentelemetry-semantic-conventions >=0.52b1, <2",
 ]

 [project.optional-dependencies]
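Editor's note: the new fsspec dependency presumably backs the uploader mentioned in the commit message, which writes prompt/response content to remote storage and keeps only references on the telemetry. A minimal sketch of that idea, assuming a hypothetical helper name and base path; this is not the committed Uploader class:

import json

import fsspec


def upload_message_ref(base_path: str, response_id: str, payload: dict) -> str:
    # Hypothetical helper: write the payload for this response and return its URI.
    # fsspec resolves the storage backend (gs://, s3://, file://, ...) from the URI scheme.
    uri = f"{base_path.rstrip('/')}/{response_id}.json"
    with fsspec.open(uri, "w") as out:
        json.dump(payload, out)
    return uri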

instrumentation-genai/opentelemetry-instrumentation-google-genai/src/opentelemetry/instrumentation/google_genai/generate_content.py

Lines changed: 23 additions & 22 deletions
@@ -12,25 +12,25 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+from __future__ import annotations
+
 import copy
 import functools
 import json
 import logging
 import os
-from re import S
 import time
 from typing import Any, AsyncIterator, Awaitable, Iterator, Optional, Union
+from uuid import uuid4

-from google.genai.models import AsyncModels, Models, t as transformers
+from google.genai.models import AsyncModels, Models
+from google.genai.models import t as transformers
 from google.genai.types import (
     BlockedReason,
     Candidate,
     Content,
     ContentListUnion,
-    GenerateContentConfig,
-    ContentUnion,
     ContentListUnionDict,
-    Content,
     ContentUnion,
     ContentUnionDict,
     GenerateContentConfig,
@@ -49,14 +49,13 @@
 from .custom_semconv import GCP_GENAI_OPERATION_CONFIG
 from .dict_util import flatten_dict
 from .flags import is_content_recording_enabled
-from .otel_wrapper import OTelWrapper
-from .tool_call_wrapper import wrapped as wrapped_tool
 from .message import (
-    ContentUnion,
     to_input_messages,
     to_output_message,
     to_system_instruction,
 )
+from .otel_wrapper import OTelWrapper
+from .tool_call_wrapper import wrapped as wrapped_tool

 _logger = logging.getLogger(__name__)

@@ -330,7 +329,7 @@ def process_completion(
         self._update_finish_reasons(response)
         self._maybe_update_token_counts(response)
         self._maybe_update_error_type(response)
-        self._maybe_log_response(response)
+        # self._maybe_log_response(response)
         self._response_index += 1

     def process_error(self, e: Exception):
@@ -409,24 +408,17 @@ def _maybe_log_completion_details(
         request: Union[ContentListUnion, ContentListUnionDict],
         response: GenerateContentResponse,
     ) -> None:
-        def _transform_content(
-            content: Union[
-                ContentListUnion, ContentListUnionDict, Content, None
-            ],
-        ) -> list[Content]:
-            if content is None:
-                return []
-            return transformers.t_contents(content)
-
         attributes = {
             gen_ai_attributes.GEN_AI_SYSTEM: self._genai_system,
         }

-        system_instruction = to_system_instruction(
-            contents=_transform_content(_config_to_system_instruction(config))
-        )
+        system_instruction = None
+        if system_content := _config_to_system_instruction(config):
+            system_instruction = to_system_instruction(
+                content=transformers.t_contents(system_content)[0]
+            )
         input_messages = to_input_messages(
-            contents=_transform_content(request)
+            contents=transformers.t_contents(request)
         )
         output_message = to_output_message(
             candidates=response.candidates or []
@@ -439,6 +431,15 @@ def _transform_content(
             attributes=attributes,
         )

+        # Forward looking remote storage refs
+        self._otel_wrapper.log_completion_details_refs(
+            system_instructions=system_instruction,
+            input_messages=input_messages,
+            output_messages=output_message,
+            attributes=attributes,
+            response_id=response.response_id or str(uuid4()),
+        )
+
     def _maybe_log_system_instruction(
         self, config: Optional[GenerateContentConfigOrDict] = None
     ):
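Editor's note: the OTelWrapper.log_completion_details_refs method called above is not part of this diff. A rough sketch of what such a *_ref emitter could look like, assuming an uploader like the fsspec sketch earlier; the attribute keys, event name, and callback parameters here are placeholders, not the instrumentation's real API:

from dataclasses import asdict, is_dataclass
from typing import Any, Callable


def log_completion_details_refs(
    *,
    upload: Callable[[str, Any], str],        # hypothetical: uploads content, returns a storage URI
    emit_event: Callable[[str, dict], None],  # hypothetical: emits an OTel event with attributes
    system_instructions: Any,
    input_messages: Any,
    output_messages: Any,
    attributes: dict,
    response_id: str,
) -> None:
    def to_payload(obj: Any) -> Any:
        # Message models are assumed to be dataclasses; serialize them to plain dicts.
        return asdict(obj) if is_dataclass(obj) else obj

    refs = {}
    for key, value in (
        ("gen_ai.system_instructions_ref", system_instructions),
        ("gen_ai.input_messages_ref", input_messages),
        ("gen_ai.output_messages_ref", output_messages),
    ):
        if value is not None:
            # Upload the content and record only a reference on the event.
            refs[key] = upload(f"{response_id}/{key}", to_payload(value))

    emit_event("gen_ai.client.inference.operation.details", {**attributes, **refs})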

instrumentation-genai/opentelemetry-instrumentation-google-genai/src/opentelemetry/instrumentation/google_genai/message.py

Lines changed: 70 additions & 46 deletions
@@ -12,85 +12,83 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+from __future__ import annotations
+
 import logging
+
+from google.genai import types as genai_types
+
 from .message_models import (
     BlobPart,
     ChatMessage,
-    Choice,
     FileDataPart,
+    FinishReason,
     InputMessages,
     MessagePart,
-    SystemMessage,
+    OutputMessage,
     OutputMessages,
+    Role,
     TextPart,
     ToolCallPart,
     ToolCallResponsePart,
-    UnknownPart,
-)
-
-from google.genai.models import t as transfomers
-
-from google.genai.types import (
-    BlockedReason,
-    Candidate,
-    Content,
-    ContentListUnion,
-    ContentListUnionDict,
-    ContentUnion,
-    GenerateContentConfig,
-    ContentUnionDict,
-    GenerateContentConfig,
-    GenerateContentConfigOrDict,
-    GenerateContentResponse,
-    Part,
-    PartUnion,
 )

 _logger = logging.getLogger(__name__)


 def to_input_messages(
     *,
-    contents: list[Content],
+    contents: list[genai_types.Content],
 ) -> InputMessages:
-    return InputMessages(
-        messages=[_to_chat_message(content) for content in contents]
-    )
+    return InputMessages([_to_chat_message(content) for content in contents])


 def to_output_message(
     *,
-    candidates: list[Candidate],
+    candidates: list[genai_types.Candidate],
 ) -> OutputMessages:
-    choices = [
-        Choice(
-            finish_reason=candidate.finish_reason or "",
-            message=_to_chat_message(candidate.content)
-            if candidate.content
-            else None,
+    def content_to_output_message(
+        candidate: genai_types.Candidate,
+    ) -> OutputMessage | None:
+        if not candidate.content:
+            return None
+
+        message = _to_chat_message(candidate.content)
+        return OutputMessage(
+            finish_reason=_to_finish_reason(candidate.finish_reason),
+            role=message.role,
+            parts=message.parts,
         )
-        for candidate in candidates
-    ]
-    return OutputMessages(choices=choices)
+
+    messages = (
+        content_to_output_message(candidate) for candidate in candidates
+    )
+    return OutputMessages(
+        [message for message in messages if message is not None]
+    )


+# TODO: re-using ChatMessage for now but it is defined as any in
+# https://github.com/open-telemetry/semantic-conventions/pull/2179. I prefer ChatMessage.
 def to_system_instruction(
     *,
-    contents: list[Content],
-) -> SystemMessage | None:
-    return SystemMessage(
-        messages=[_to_chat_message(content) for content in contents]
-    )
+    content: genai_types.Content,
+) -> ChatMessage | None:
+    return _to_chat_message(content)


 def _to_chat_message(
-    content: Content,
+    content: genai_types.Content,
 ) -> ChatMessage:
-    parts = content.parts or []
-    return ChatMessage(role="system", parts=[_to_part(part) for part in parts])
+    parts = (_to_part(part) for part in (content.parts or []))
+    return ChatMessage(
+        role=_to_role(content.role),
+        # filter Nones
+        parts=[part for part in parts if part is not None],
+    )


-def _to_part(part: Part) -> MessagePart:
+def _to_part(part: genai_types.Part) -> MessagePart | None:
     if (text := part.text) is not None:
         return TextPart(content=text)

@@ -110,9 +108,35 @@ def _to_part(part: Part) -> MessagePart:
     if response := part.function_response:
         return ToolCallResponsePart(
             id=response.id or "",
-            name=response.name or "",
             result=response.response,
         )

     _logger.info("Unknown part dropped from telemetry %s", part)
-    return UnknownPart()
+    return None
+
+
+def _to_role(role: str | None) -> Role | str:
+    if role == "user":
+        return Role.USER
+    elif role == "model":
+        return Role.ASSISTANT
+    return ""
+
+
+def _to_finish_reason(
+    finish_reason: genai_types.FinishReason | None,
+) -> FinishReason | str:
+    if finish_reason is None:
+        return ""
+    if (
+        finish_reason is genai_types.FinishReason.FINISH_REASON_UNSPECIFIED
+        or finish_reason is genai_types.FinishReason.OTHER
+    ):
+        return FinishReason.ERROR
+    if finish_reason is genai_types.FinishReason.STOP:
+        return FinishReason.STOP
+    if finish_reason is genai_types.FinishReason.MAX_TOKENS:
+        return FinishReason.LENGTH
+
+    # If there is no 1:1 mapping to an OTel preferred enum value, use the exact vertex reason
+    return finish_reason.name
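Editor's note: a small usage sketch of the new converters, assuming the package's internal message module is importable at the path below; the expected mappings follow the _to_role and _to_finish_reason helpers added in this commit:

from google.genai import types as genai_types

from opentelemetry.instrumentation.google_genai.message import (
    to_input_messages,
    to_output_message,
)

contents = [genai_types.Content(role="user", parts=[genai_types.Part(text="Hi")])]
candidates = [
    genai_types.Candidate(
        content=genai_types.Content(
            role="model", parts=[genai_types.Part(text="Hello!")]
        ),
        finish_reason=genai_types.FinishReason.STOP,
    )
]

# "user" maps to Role.USER, "model" to Role.ASSISTANT; STOP maps to FinishReason.STOP.
input_messages = to_input_messages(contents=contents)
output_messages = to_output_message(candidates=candidates)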
