Commit 519dc0b

Working sanitized VCR for tests, but missing some assertions

1 parent 11a6dc2 commit 519dc0b

File tree

4 files changed: +164 additions, -53 deletions
Lines changed: 59 additions & 0 deletions
@@ -0,0 +1,59 @@
interactions:
- request:
    body: "{\n \"contents\": [\n {\n \"role\": \"user\",\n \"parts\":
      [\n {\n \"fileData\": {\n \"mimeType\": \"image/jpeg\",\n
      \ \"fileUri\": \"gs://generativeai-downloads/images/scones.jpg\"\n
      \ }\n },\n {\n \"text\": \"what is shown in this
      image?\"\n }\n ]\n }\n ]\n}"
    headers:
      Accept:
      - '*/*'
      Accept-Encoding:
      - gzip, deflate
      Connection:
      - keep-alive
      Content-Length:
      - '317'
      Content-Type:
      - application/json
      User-Agent:
      - python-requests/2.32.3
    method: POST
    uri: https://us-central1-aiplatform.googleapis.com/v1beta1/projects/otel-starter-project/locations/us-central1/publishers/google/models/gemini-pro-vision:generateContent?%24alt=json%3Benum-encoding%3Dint
  response:
    body:
      string: "{\n \"candidates\": [\n {\n \"content\": {\n \"role\":
        \"model\",\n \"parts\": [\n {\n \"text\": \" The
        image shows a table with a cup of coffee, a bowl of blueberries, and a plate
        of scones with blueberries on top. There are also pink flowers on the table.\"\n
        \ }\n ]\n },\n \"finishReason\": 1,\n \"safetyRatings\":
        [\n {\n \"category\": 1,\n \"probability\": 1,\n
        \ \"probabilityScore\": 0.025512695,\n \"severity\": 1,\n
        \ \"severityScore\": 0.06933594\n },\n {\n \"category\":
        2,\n \"probability\": 1,\n \"probabilityScore\": 0.026367188,\n
        \ \"severity\": 1,\n \"severityScore\": 0.07080078\n },\n
        \ {\n \"category\": 3,\n \"probability\": 1,\n \"probabilityScore\":
        0.041503906,\n \"severity\": 1,\n \"severityScore\": 0.03466797\n
        \ },\n {\n \"category\": 4,\n \"probability\":
        1,\n \"probabilityScore\": 0.091308594,\n \"severity\":
        1,\n \"severityScore\": 0.09033203\n }\n ],\n \"avgLogprobs\":
        -0.095571068355015346\n }\n ],\n \"usageMetadata\": {\n \"promptTokenCount\":
        265,\n \"candidatesTokenCount\": 35,\n \"totalTokenCount\": 300\n },\n
        \ \"modelVersion\": \"gemini-pro-vision\"\n}\n"
    headers:
      Cache-Control:
      - private
      Content-Encoding:
      - gzip
      Content-Type:
      - application/json; charset=UTF-8
      Transfer-Encoding:
      - chunked
      Vary:
      - Origin
      - X-Origin
      - Referer
    status:
      code: 200
      message: OK
version: 1
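This cassette is the sanitized recording referenced in the commit message: after the header filtering added in conftest.py below, only generic request and response headers survive, and nothing identifying (Authorization, Date, X-* and similar) is persisted. As a rough illustration, a cassette like this can be inspected as plain YAML; the path used here is an assumption, since the cassette's filename is not visible in this diff (pytest-recording derives it from the test name by default).

import yaml

# Illustration only: the cassette path below is assumed, not shown in this diff.
with open("tests/cassettes/test_vertexai_generate_content.yaml") as f:
    cassette = yaml.safe_load(f)

interaction = cassette["interactions"][0]
print(interaction["request"]["method"], interaction["request"]["uri"])
print(sorted(interaction["request"]["headers"]))  # no Authorization header remains
print(interaction["response"]["status"]["code"])  # 200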

instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/tests/conftest.py

Lines changed: 56 additions & 1 deletion
@@ -1,6 +1,10 @@
 """Unit tests configuration module."""
 
+import re
+from typing import Any, Mapping, MutableMapping
+
 import pytest
+from google.auth.credentials import AnonymousCredentials
 
 from opentelemetry import trace
 from opentelemetry.instrumentation.vertexai_v2 import VertexAIInstrumentor
@@ -12,6 +16,11 @@
 
 pytest_plugins = []
 
+import vertexai
+from vcr import VCR
+from vcr.record_mode import RecordMode
+from vcr.request import Request
+
 
 @pytest.fixture(scope="session")
 def exporter():
@@ -32,6 +41,52 @@ def clear_exporter(exporter):
     exporter.clear()
 
 
+@pytest.fixture(autouse=True)
+def vertexai_init(vcr: VCR) -> None:
+    # Unfortunately I couldn't find a nice way to globally reset the global_config for each
+    # test because different vertex submodules reference the global instance directly
+    # https://github.com/googleapis/python-aiplatform/blob/v1.74.0/google/cloud/aiplatform/initializer.py#L687
+    # so this config will leak if we don't call init() for each test.
+
+    # When not recording (in CI), don't do any auth. That prevents trying to read application
+    # default credentials from the filesystem or metadata server and oauth token exchange. This
+    # is not the interesting part of our instrumentation to test.
+    if vcr.record_mode is RecordMode.NONE:
+        vertexai.init(credentials=AnonymousCredentials())
+    else:
+        vertexai.init()
+
+
 @pytest.fixture(scope="module")
 def vcr_config():
-    return {"filter_headers": ["authorization"]}
+    filter_header_regexes = [
+        r"X-.*",
+        "Server",
+        "Date",
+        "Expires",
+        "Authorization",
+    ]
+
+    def filter_headers(headers: Mapping[str, str]) -> Mapping[str, str]:
+        return {
+            key: val
+            for key, val in headers.items()
+            if not any(
+                re.match(filter_re, key, re.IGNORECASE)
+                for filter_re in filter_header_regexes
+            )
+        }
+
+    def before_record_cb(request: Request):
+        request.headers = filter_headers(request.headers)
+        return request
+
+    def before_response_cb(response: MutableMapping[str, Any]):
+        response["headers"] = filter_headers(response["headers"])
+        return response
+
+    return {
+        "before_record_request": before_record_cb,
+        "before_record_response": before_response_cb,
+        "ignore_hosts": ["oauth2.googleapis.com"],
+    }
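To make the effect of the new filtering concrete, here is a minimal standalone sketch of the same regex logic applied to a recorded header set; only the patterns come from the fixture above, and the sample header values are invented for illustration.

import re

# Same patterns as in the vcr_config fixture above.
filter_header_regexes = [r"X-.*", "Server", "Date", "Expires", "Authorization"]

def filter_headers(headers):
    return {
        key: val
        for key, val in headers.items()
        if not any(
            re.match(filter_re, key, re.IGNORECASE)
            for filter_re in filter_header_regexes
        )
    }

# Sample values are made up for illustration.
recorded = {
    "Content-Type": "application/json; charset=UTF-8",
    "Authorization": "Bearer <token>",
    "Date": "Mon, 01 Jan 2024 00:00:00 GMT",
    "x-goog-api-client": "gl-python/3.12",
    "Cache-Control": "private",
}

print(filter_headers(recorded))
# {'Content-Type': 'application/json; charset=UTF-8', 'Cache-Control': 'private'}

Anything matching one of the patterns is dropped case-insensitively, which is why the cassette above contains only generic headers such as Content-Type and Cache-Control.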

instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/tests/disabled_test_gemini.py

Lines changed: 0 additions & 52 deletions
This file was deleted.
Lines changed: 49 additions & 0 deletions
@@ -0,0 +1,49 @@
import pytest
from vertexai.preview.generative_models import GenerativeModel, Part

# from opentelemetry.semconv_ai import SpanAttributes


@pytest.mark.vcr
def test_vertexai_generate_content(exporter):
    multimodal_model = GenerativeModel("gemini-pro-vision")
    response = multimodal_model.generate_content(
        [
            Part.from_uri(
                "gs://generativeai-downloads/images/scones.jpg",
                mime_type="image/jpeg",
            ),
            "what is shown in this image?",
        ]
    )

    spans = exporter.get_finished_spans()
    assert [span.name for span in spans] == [
        "text_completion gemini-pro-vision"
    ]

    vertexai_span = spans[0]
    # assert (
    #     "what is shown in this image?"
    #     in vertexai_span.attributes[f"{SpanAttributes.LLM_PROMPTS}.0.user"]
    # )
    # assert (
    #     vertexai_span.attributes[SpanAttributes.LLM_REQUEST_MODEL]
    #     == "gemini-pro-vision"
    # )
    # assert (
    #     vertexai_span.attributes[SpanAttributes.LLM_USAGE_TOTAL_TOKENS]
    #     == response._raw_response.usage_metadata.total_token_count
    # )
    # assert (
    #     vertexai_span.attributes[SpanAttributes.LLM_USAGE_PROMPT_TOKENS]
    #     == response._raw_response.usage_metadata.prompt_token_count
    # )
    # assert (
    #     vertexai_span.attributes[SpanAttributes.LLM_USAGE_COMPLETION_TOKENS]
    #     == response._raw_response.usage_metadata.candidates_token_count
    # )
    # assert (
    #     vertexai_span.attributes[f"{SpanAttributes.LLM_COMPLETIONS}.0.content"]
    #     == response.text
    # )
