Commit 3670fbd

Merge branch 'main' into script-pr

2 parents 0e9f8ac + 40932c2, commit 3670fbd

File tree: 11 files changed, +378 -38 lines


CHANGELOG.md

Lines changed: 2 additions & 0 deletions
@@ -17,6 +17,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
   ([#3624](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/3624))
 - `opentelemetry-instrumentation-dbapi`: fix crash retrieving libpq version when enabling commenter with psycopg
   ([#3796](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/3796))
+- `opentelemetry-instrumentation-fastapi`: Fix handling of APIRoute subclasses
+  ([#3681](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/3681))
 
 ### Added
 

instrumentation-genai/opentelemetry-instrumentation-vertexai/CHANGELOG.md

Lines changed: 3 additions & 1 deletion
@@ -14,8 +14,10 @@ users will need to set the environment variable OTEL_SEMCONV_STABILITY_OPT_IN to
   ([#3328](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/3328))
 - VertexAI support for async calling
   ([#3386](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/3386))
-- `opentelemetry-instrumentation-vertexai`: migrate off the deprecated events API to use the logs API
+- Migrate off the deprecated events API to use the logs API
   ([#3625](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/3626))
+- Update `gen_ai_latest_experimental` instrumentation to record files being passed to the model
+  ([#3840](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/3840)).
 
 ## Version 2.0b0 (2025-02-24)

instrumentation-genai/opentelemetry-instrumentation-vertexai/src/opentelemetry/instrumentation/vertexai/utils.py

Lines changed: 31 additions & 5 deletions
@@ -16,6 +16,7 @@
 
 from __future__ import annotations
 
+import logging
 import re
 from dataclasses import dataclass
 from os import environ
@@ -308,6 +309,23 @@ def request_to_events(
         yield user_event(role=content.role, content=request_content)
 
 
+@dataclass
+class BlobPart:
+    data: bytes
+    mime_type: str
+    type: Literal["blob"] = "blob"
+
+
+@dataclass
+class FileDataPart:
+    mime_type: str
+    uri: str
+    type: Literal["file_data"] = "file_data"
+
+    class Config:
+        extra = "allow"
+
+
 def convert_content_to_message_parts(
     content: content.Content | content_v1beta1.Content,
 ) -> list[MessagePart]:
@@ -334,12 +352,20 @@ def convert_content_to_message_parts(
             )
         elif "text" in part:
             parts.append(Text(content=part.text))
-        else:
-            dict_part = type(part).to_dict(  # type: ignore[reportUnknownMemberType]
-                part, always_print_fields_with_no_presence=False
+        elif "inline_data" in part:
+            part = part.inline_data
+            parts.append(
+                BlobPart(mime_type=part.mime_type or "", data=part.data or b"")
+            )
+        elif "file_data" in part:
+            part = part.file_data
+            parts.append(
+                FileDataPart(
+                    mime_type=part.mime_type or "", uri=part.file_uri or ""
+                )
             )
-            dict_part["type"] = type(part)
-            parts.append(dict_part)
+        else:
+            logging.warning("Unknown part dropped from telemetry %s", part)
     return parts

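The utils.py change above swaps the old catch-all `to_dict()` branch for typed `BlobPart` and `FileDataPart` dataclasses, so inline and file parts reach telemetry with a stable shape and anything unrecognized is dropped with a warning. Below is a minimal sketch of the mapping for an inline image, using a hypothetical `FakeBlob` stand-in for the proto message (the real code reads the same `mime_type`/`data` fields off `part.inline_data`):

from dataclasses import asdict, dataclass
from typing import Literal


@dataclass
class BlobPart:
    data: bytes
    mime_type: str
    type: Literal["blob"] = "blob"


@dataclass
class FakeBlob:
    # Hypothetical stand-in for the Vertex AI proto Blob message; only the two
    # fields the instrumentation reads are modeled here.
    mime_type: str = ""
    data: bytes = b""


inline = FakeBlob(mime_type="image/jpeg", data=b"\x89PNG...")
blob_part = BlobPart(mime_type=inline.mime_type or "", data=inline.data or b"")

# The dataclass flattens to the dict shape asserted in the updated test below.
assert asdict(blob_part) == {
    "data": b"\x89PNG...",
    "mime_type": "image/jpeg",
    "type": "blob",
}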
Lines changed: 102 additions & 0 deletions
@@ -0,0 +1,102 @@
+interactions:
+- request:
+    body: |-
+      {
+        "contents": [
+          {
+            "role": "user",
+            "parts": [
+              {
+                "text": "Say this is a test"
+              },
+              {
+                "fileData": {
+                  "mimeType": "image/jpeg",
+                  "fileUri": "https://images.pdimagearchive.org/collections/microscopic-delights/1lede-0021.jpg"
+                }
+              },
+              {
+                "inlineData": {
+                  "mimeType": "image/jpeg",
+                  "data": "iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg=="
+                }
+              }
+            ]
+          }
+        ]
+      }
+    headers:
+      Accept:
+      - '*/*'
+      Accept-Encoding:
+      - gzip, deflate
+      Connection:
+      - keep-alive
+      Content-Length:
+      - '554'
+      Content-Type:
+      - application/json
+      User-Agent:
+      - python-requests/2.32.3
+    method: POST
+    uri: https://us-central1-aiplatform.googleapis.com/v1/projects/fake-project/locations/us-central1/publishers/google/models/gemini-2.5-pro:generateContent?%24alt=json%3Benum-encoding%3Dint
+  response:
+    body:
+      string: |-
+        {
+          "candidates": [
+            {
+              "content": {
+                "role": "model",
+                "parts": [
+                  {
+                    "text": "This is a test."
+                  }
+                ]
+              },
+              "finishReason": 1,
+              "avgLogprobs": -24.462081909179688
+            }
+          ],
+          "usageMetadata": {
+            "promptTokenCount": 521,
+            "candidatesTokenCount": 5,
+            "totalTokenCount": 950,
+            "trafficType": 1,
+            "promptTokensDetails": [
+              {
+                "modality": 2,
+                "tokenCount": 516
+              },
+              {
+                "modality": 1,
+                "tokenCount": 5
+              }
+            ],
+            "candidatesTokensDetails": [
+              {
+                "modality": 1,
+                "tokenCount": 5
+              }
+            ],
+            "thoughtsTokenCount": 424
+          },
+          "modelVersion": "gemini-2.5-pro",
+          "createTime": "2025-10-13T16:29:47.639271Z",
+          "responseId": "-yjtaKeCJ5KYmecP76S4-AI"
+        }
+    headers:
+      Content-Type:
+      - application/json; charset=UTF-8
+      Transfer-Encoding:
+      - chunked
+      Vary:
+      - Origin
+      - X-Origin
+      - Referer
+      content-length:
+      - '808'
+    status:
+      code: 200
+      message: OK
+version: 1

instrumentation-genai/opentelemetry-instrumentation-vertexai/tests/test_chat_completions_experimental.py

Lines changed: 30 additions & 8 deletions
@@ -6,6 +6,7 @@
     Content,
     GenerationConfig,
     GenerativeModel,
+    Image,
     Part,
 )
 from vertexai.preview.generative_models import (
@@ -24,7 +25,7 @@
 
 
 @pytest.mark.vcr()
-def test_generate_content(
+def test_generate_content_with_files(
     span_exporter: InMemorySpanExporter,
    log_exporter: InMemoryLogExporter,
     generate_content: callable,
@@ -38,6 +39,15 @@ def test_generate_content(
                 role="user",
                 parts=[
                     Part.from_text("Say this is a test"),
+                    Part.from_uri(
+                        mime_type="image/jpeg",
+                        uri="https://images.pdimagearchive.org/collections/microscopic-delights/1lede-0021.jpg",
+                    ),
+                    Part.from_image(
+                        Image.from_bytes(
+                            "iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg=="
+                        )
+                    ),
                 ],
             ),
         ],
@@ -52,36 +62,48 @@ def test_generate_content(
         "gen_ai.request.model": "gemini-2.5-pro",
         "gen_ai.response.finish_reasons": ("stop",),
         "gen_ai.response.model": "gemini-2.5-pro",
-        "gen_ai.usage.input_tokens": 5,
+        "gen_ai.usage.input_tokens": 521,
         "gen_ai.usage.output_tokens": 5,
         "server.address": "us-central1-aiplatform.googleapis.com",
         "server.port": 443,
-        "gen_ai.input.messages": '[{"role":"user","parts":[{"content":"Say this is a test","type":"text"}]}]',
+        "gen_ai.input.messages": '[{"role":"user","parts":[{"content":"Say this is a test","type":"text"},{"mime_type":"image/jpeg","uri":"https://images.pdimagearchive.org/collections/microscopic-delights/1lede-0021.jpg","type":"file_data"},{"data":"iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg==","mime_type":"image/jpeg","type":"blob"}]}]',
         "gen_ai.output.messages": '[{"role":"model","parts":[{"content":"This is a test.","type":"text"}],"finish_reason":"stop"}]',
     }
 
     logs = log_exporter.get_finished_logs()
     assert len(logs) == 1
     log = logs[0].log_record
     assert log.attributes == {
-        "gen_ai.operation.name": "chat",
-        "gen_ai.request.model": "gemini-2.5-pro",
         "server.address": "us-central1-aiplatform.googleapis.com",
         "server.port": 443,
+        "gen_ai.operation.name": "chat",
+        "gen_ai.request.model": "gemini-2.5-pro",
         "gen_ai.response.model": "gemini-2.5-pro",
         "gen_ai.response.finish_reasons": ("stop",),
-        "gen_ai.usage.input_tokens": 5,
+        "gen_ai.usage.input_tokens": 521,
         "gen_ai.usage.output_tokens": 5,
         "gen_ai.input.messages": (
             {
                 "role": "user",
-                "parts": ({"type": "text", "content": "Say this is a test"},),
+                "parts": (
+                    {"content": "Say this is a test", "type": "text"},
+                    {
+                        "mime_type": "image/jpeg",
+                        "uri": "https://images.pdimagearchive.org/collections/microscopic-delights/1lede-0021.jpg",
+                        "type": "file_data",
+                    },
+                    {
+                        "data": b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x05\x00\x00\x00\x05\x08\x06\x00\x00\x00\x8do&\xe5\x00\x00\x00\x1cIDAT\x08\xd7c\xf8\xff\xff?\xc3\x7f\x06 \x05\xc3 \x12\x84\xd01\xf1\x82X\xcd\x04\x00\x0e\xf55\xcb\xd1\x8e\x0e\x1f\x00\x00\x00\x00IEND\xaeB`\x82",
+                        "mime_type": "image/jpeg",
+                        "type": "blob",
+                    },
+                ),
             },
         ),
         "gen_ai.output.messages": (
             {
                 "role": "model",
-                "parts": ({"type": "text", "content": "This is a test."},),
+                "parts": ({"content": "This is a test.", "type": "text"},),
                 "finish_reason": "stop",
             },
         ),

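The updated test asserts the same parts in two forms: plain dataclass dicts on the log record, and a JSON string on the span (`gen_ai.input.messages`) where the blob's `bytes` payload appears base64-encoded. A rough sketch of that relationship, using a hypothetical encoder; the real serialization happens inside the instrumentation and is not part of this diff:

import base64
import dataclasses
import json
from dataclasses import dataclass
from typing import Literal


@dataclass
class BlobPart:
    data: bytes
    mime_type: str
    type: Literal["blob"] = "blob"


def _encode(obj):
    # Hypothetical encoder: dataclasses become dicts, bytes become base64 text.
    if dataclasses.is_dataclass(obj):
        return dataclasses.asdict(obj)
    if isinstance(obj, bytes):
        return base64.b64encode(obj).decode()
    raise TypeError(f"unsupported type {type(obj)!r}")


png = base64.b64decode(
    "iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38"
    "GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg=="
)
part = BlobPart(data=png, mime_type="image/jpeg")

# Prints {"data":"iVBOR...","mime_type":"image/jpeg","type":"blob"}, matching
# the blob entry inside gen_ai.input.messages on the span.
print(json.dumps(part, default=_encode, separators=(",", ":")))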
instrumentation/opentelemetry-instrumentation-fastapi/src/opentelemetry/instrumentation/fastapi/__init__.py

Lines changed: 6 additions & 2 deletions
@@ -191,7 +191,7 @@ def client_response_hook(span: Span, scope: dict[str, Any], message: dict[str, A
 import fastapi
 from starlette.applications import Starlette
 from starlette.middleware.errors import ServerErrorMiddleware
-from starlette.routing import Match
+from starlette.routing import Match, Route
 from starlette.types import ASGIApp, Receive, Scope, Send
 
 from opentelemetry.instrumentation._semconv import (
@@ -474,7 +474,11 @@ def _get_route_details(scope):
     route = None
 
     for starlette_route in app.routes:
-        match, _ = starlette_route.matches(scope)
+        match, _ = (
+            Route.matches(starlette_route, scope)
+            if isinstance(starlette_route, Route)
+            else starlette_route.matches(scope)
+        )
         if match == Match.FULL:
             try:
                 route = starlette_route.path

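The `_get_route_details` change above calls Starlette's base `Route.matches` unbound whenever the route is a `Route` instance, so FastAPI `APIRoute` subclasses that override `matches()` no longer interfere with span-name resolution. A small illustration of the mechanism, using a hypothetical misbehaving subclass:

from starlette.routing import Match, Route


async def endpoint(request):
    # Hypothetical handler; never invoked in this sketch.
    ...


class CustomRoute(Route):
    # Hypothetical APIRoute-style subclass with an overridden matches().
    def matches(self, scope):
        return Match.NONE, {}


route = CustomRoute("/items/{item_id}", endpoint)
scope = {"type": "http", "method": "GET", "path": "/items/42"}

# The override reports no match, but the unbound base-class call still resolves
# the path template, which is what the instrumentation uses for the span name.
assert route.matches(scope)[0] == Match.NONE
assert Route.matches(route, scope)[0] == Match.FULL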