
Commit 038f90e

Improve data model and add some rudimentary type checking.
1 parent c006346 commit 038f90e

7 files changed: +100 -28 lines


instrumentation-genai/opentelemetry-instrumentation-google-genai/Makefile

Lines changed: 4 additions & 2 deletions
@@ -12,9 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-.PHONY: all test install clean lint
+.PHONY: all test install clean lint typecheck
 
-all: build test lint
+all: build test lint typecheck
 
 test:
 	./tools/test.sh
@@ -27,6 +27,8 @@ build: ./dist/opentelemetry_instrumentation_google_genai-0.0.1-py3-none-any.whl
 install: ./dist/opentelemetry_instrumentation_google_genai-0.0.1-py3-none-any.whl
 	pip install ./dist/opentelemetry_instrumentation_google_genai-0.0.1-py3-none-any.whl
 
+typecheck:
+	./tools/typecheck.sh
 
 lint:
 	./tools/lint.sh

instrumentation-genai/opentelemetry-instrumentation-google-genai/pyproject.toml

Lines changed: 12 additions & 0 deletions
@@ -65,3 +65,15 @@ include = [
 
 [tool.hatch.build.targets.wheel]
 packages = ["src/opentelemetry"]
+
+[tool.pyright]
+include = [
+    "src",
+]
+exclude = [
+    "**/__pycache__",
+]
+stubPath = "types"
+reportMissingImports = "error"
+reportMissingTypeStubs = false
+pythonVersion = "3.9"
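A note on the stubPath setting above: it points pyright at a directory of hand-written type stubs kept alongside the sources. As a minimal sketch (the package name and stub contents here are hypothetical, not part of this commit), a stub placed under that directory could look like:

    # types/legacy_sdk/__init__.pyi -- hypothetical stub, shown only to illustrate stubPath
    from typing import Any, Optional

    def do_request(endpoint: str, payload: Optional[dict] = None) -> Any: ...

    class Client:
        def close(self) -> None: ...

With reportMissingImports set to "error", an import pyright cannot resolve fails the check outright, while reportMissingTypeStubs = false keeps installed-but-stubless packages from doing the same.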

instrumentation-genai/opentelemetry-instrumentation-google-genai/src/opentelemetry/instrumentation/google_genai/generate_content.py

Lines changed: 19 additions & 15 deletions
@@ -12,11 +12,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import asyncio
 import functools
 import logging
 import os
 import time
-from typing import AsyncIterator, Awaitable, Iterator, Optional, Union
+from typing import Any, AsyncIterator, Awaitable, Iterator, Optional, Union
 
 from google.genai.models import AsyncModels, Models
 from google.genai.types import (
@@ -29,6 +30,7 @@
 
 from opentelemetry import trace
 from opentelemetry.semconv._incubating.attributes import gen_ai_attributes
+from opentelemetry.semconv._incubating.attributes import code_attributes
 from opentelemetry.semconv.attributes import error_attributes
 
 from .flags import is_content_recording_enabled
@@ -120,11 +122,11 @@ def _determine_genai_system(models_object: Union[Models, AsyncModels]):
 
 def _get_config_property(
     config: Optional[GenerateContentConfigOrDict],
-    path: str):
+    path: str) -> Any:
     if config is None:
         return None
     path_segments = path.split(".")
-    current_context = config
+    current_context: Any = config
     for path_segment in path_segments:
         if current_context is None:
             return None
@@ -186,11 +188,12 @@ def __init__(
         self._output_tokens = 0
         self._content_recording_enabled = is_content_recording_enabled()
 
-    def start_span_as_current_span(self, name):
+    def start_span_as_current_span(self, model_name, function_name):
         return self._otel_wrapper.start_as_current_span(
-            name,
+            f'generate_content [{model_name}]',
             start_time=self._start_time,
             attributes={
+                code_attributes.CODE_FUNCTION_NAME: function_name,
                 gen_ai_attributes.GEN_AI_SYSTEM: self._genai_system,
                 gen_ai_attributes.GEN_AI_REQUEST_MODEL: self._genai_request_model,
                 gen_ai_attributes.GEN_AI_OPERATION_NAME: "GenerateContent",
@@ -230,9 +233,9 @@ def finalize_processing(self):
     def _maybe_update_token_counts(self, response: GenerateContentResponse):
         input_tokens = _get_response_property(response, "usage_metadata.prompt_token_count")
         output_tokens = _get_response_property(response, "usage_metadata.candidates_token_count")
-        if input_tokens:
+        if input_tokens and isinstance(input_tokens, int):
             self._input_tokens += input_tokens
-        if output_tokens:
+        if output_tokens and isinstance(output_tokens, int):
             self._output_tokens += output_tokens
 
     def _maybe_update_error_type(self, response: GenerateContentResponse):
@@ -331,7 +334,7 @@ def instrumented_generate_content(
         contents: Union[ContentListUnion, ContentListUnionDict],
         config: Optional[GenerateContentConfigOrDict] = None) -> GenerateContentResponse:
         helper = _GenerateContentInstrumentationHelper(self, otel_wrapper, model)
-        with helper.start_span_as_current_span("google.genai.Models.generate_content"):
+        with helper.start_span_as_current_span(model, "google.genai.Models.generate_content"):
             helper.process_request(contents, config)
             try:
                 response = wrapped_func(self, model=model, contents=contents, config=config)
@@ -360,7 +363,7 @@ def instrumented_generate_content_stream(
         contents: Union[ContentListUnion, ContentListUnionDict],
         config: Optional[GenerateContentConfigOrDict] = None) -> Iterator[GenerateContentResponse]:
         helper = _GenerateContentInstrumentationHelper(self, otel_wrapper, model)
-        with helper.start_span_as_current_span("google.genai.Models.generate_content_stream"):
+        with helper.start_span_as_current_span(model, "google.genai.Models.generate_content_stream"):
             helper.process_request(contents, config)
             try:
                 for response in wrapped_func(self, model=model, contents=contents, config=config):
@@ -389,7 +392,7 @@ async def instrumented_generate_content(
         contents: Union[ContentListUnion, ContentListUnionDict],
         config: Optional[GenerateContentConfigOrDict] = None) -> GenerateContentResponse:
         helper = _GenerateContentInstrumentationHelper(self, otel_wrapper, model)
-        with helper.start_span_as_current_span("google.genai.AsyncModels.generate_content"):
+        with helper.start_span_as_current_span(model, "google.genai.AsyncModels.generate_content"):
             helper.process_request(contents, config)
             try:
                 response = await wrapped_func(self, model=model, contents=contents, config=config)
@@ -403,7 +406,8 @@ async def instrumented_generate_content(
     return instrumented_generate_content
 
 
-def _create_instrumented_async_generate_content_stream(
+# Disabling type checking because this is not yet implemented and tested fully.
+def _create_instrumented_async_generate_content_stream(  # pyright: ignore
     snapshot: _MethodsSnapshot,
     otel_wrapper: OTelWrapper):
     wrapped_func = snapshot.async_generate_content_stream
@@ -416,14 +420,14 @@ async def instrumented_generate_content_stream(
         *,
         model: str,
         contents: Union[ContentListUnion, ContentListUnionDict],
-        config: Optional[GenerateContentConfigOrDict] = None) -> Awaitable[AsyncIterator[GenerateContentResponse]]:
+        config: Optional[GenerateContentConfigOrDict] = None) -> Awaitable[AsyncIterator[GenerateContentResponse]]:  # pyright: ignore
         helper = _GenerateContentInstrumentationHelper(self, otel_wrapper, model)
-        with helper.start_span_as_current_span("google.genai.AsyncModels.generate_content_stream"):
+        with helper.start_span_as_current_span(model, "google.genai.AsyncModels.generate_content_stream"):
             helper.process_request(contents, config)
             try:
-                async for response in wrapped_func(self, model=model, contents=contents, config=config):
+                async for response in await wrapped_func(self, model=model, contents=contents, config=config):  # pyright: ignore
                     helper.process_response(response)
-                    yield response
+                    yield response  # pyright: ignore
             except Exception as error:
                 helper.process_error(error)
                 raise
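For readers skimming the diff, the net effect of the span changes above is that the span name now embeds the request model while the fully qualified API function moves into the code.function.name attribute. A minimal sketch of the resulting span shape, using the public OpenTelemetry API directly rather than the instrumentation's internal OTelWrapper (the literal attribute keys simply mirror the semconv constants referenced in the diff):

    from opentelemetry import trace

    tracer = trace.get_tracer(__name__)
    model = "gemini-2.0-flash"

    # Span named after the operation and model, as produced by start_span_as_current_span above.
    with tracer.start_as_current_span(
        f"generate_content [{model}]",
        attributes={
            "code.function.name": "google.genai.Models.generate_content",
            "gen_ai.system": "gemini",
            "gen_ai.request.model": model,
            "gen_ai.operation.name": "GenerateContent",
        },
    ):
        pass  # the wrapped google-genai call happens inside this span

This is also the shape the updated tests below assert against: the span name "generate_content [gemini-2.0-flash]" plus the gen_ai.* and code.function.name attributes.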

instrumentation-genai/opentelemetry-instrumentation-google-genai/tests/common/otel_mocker.py

Lines changed: 1 addition & 1 deletion
@@ -168,7 +168,7 @@ def get_span_named(self, name):
 
     def assert_has_span_named(self, name):
         span = self.get_span_named(name)
-        finished_spans = self.get_finished_spans()
+        finished_spans = [span.name for span in self.get_finished_spans()]
         assert span is not None, f'Could not find span named "{name}"; finished spans: {finished_spans}'
 
     def get_event_named(self, event_name):

instrumentation-genai/opentelemetry-instrumentation-google-genai/tests/generate_content/nonstreaming_base.py

Lines changed: 26 additions & 9 deletions
@@ -56,8 +56,8 @@ def setUp(self):  # pylint: disable=invalid-name
     def generate_content(self, *args, **kwargs):
         raise NotImplementedError("Must implement 'generate_content'.")
 
-    def expected_span_name(self):
-        raise NotImplementedError("Must implement 'expected_span_name'.")
+    def expected_function_name(self):
+        raise NotImplementedError("Must implement 'expected_function_name'.")
 
     def configure_valid_response(self, response_text="The model_response", input_tokens=10, output_tokens=20):
         self.requests.add_response(create_valid_response(
@@ -78,26 +78,43 @@ def test_generates_span(self):
             model="gemini-2.0-flash",
             contents="Does this work?")
         self.assertEqual(response.text, "Yep, it works!")
-        self.otel.assert_has_span_named(self.expected_span_name)
+        self.otel.assert_has_span_named("generate_content [gemini-2.0-flash]")
+
+    def test_model_reflected_into_span_name(self):
+        self.configure_valid_response(response_text="Yep, it works!")
+        response = self.generate_content(
+            model="gemini-1.5-flash",
+            contents="Does this work?")
+        self.assertEqual(response.text, "Yep, it works!")
+        self.otel.assert_has_span_named("generate_content [gemini-1.5-flash]")
 
     def test_generated_span_has_minimal_genai_attributes(self):
         self.configure_valid_response(response_text="Yep, it works!")
         self.generate_content(
             model="gemini-2.0-flash",
             contents="Does this work?")
-        self.otel.assert_has_span_named(self.expected_span_name)
-        span = self.otel.get_span_named(self.expected_span_name)
+        self.otel.assert_has_span_named("generate_content [gemini-2.0-flash]")
+        span = self.otel.get_span_named("generate_content [gemini-2.0-flash]")
         self.assertEqual(span.attributes["gen_ai.system"], "gemini")
         self.assertEqual(span.attributes["gen_ai.operation.name"], "GenerateContent")
 
+    def test_generated_span_has_correct_function_name(self):
+        self.configure_valid_response(response_text="Yep, it works!")
+        self.generate_content(
+            model="gemini-2.0-flash",
+            contents="Does this work?")
+        self.otel.assert_has_span_named("generate_content [gemini-2.0-flash]")
+        span = self.otel.get_span_named("generate_content [gemini-2.0-flash]")
+        self.assertEqual(span.attributes["code.function.name"], self.expected_function_name)
+
     def test_generated_span_has_vertex_ai_system_when_configured(self):
         self.set_use_vertex(True)
         self.configure_valid_response(response_text="Yep, it works!")
         self.generate_content(
             model="gemini-2.0-flash",
             contents="Does this work?")
-        self.otel.assert_has_span_named(self.expected_span_name)
-        span = self.otel.get_span_named(self.expected_span_name)
+        self.otel.assert_has_span_named("generate_content [gemini-2.0-flash]")
+        span = self.otel.get_span_named("generate_content [gemini-2.0-flash]")
         self.assertEqual(span.attributes["gen_ai.system"], "vertex_ai")
         self.assertEqual(span.attributes["gen_ai.operation.name"], "GenerateContent")
 
@@ -106,8 +123,8 @@ def test_generated_span_counts_tokens(self):
         self.generate_content(
             model="gemini-2.0-flash",
             contents="Some input")
-        self.otel.assert_has_span_named(self.expected_span_name)
-        span = self.otel.get_span_named(self.expected_span_name)
+        self.otel.assert_has_span_named("generate_content [gemini-2.0-flash]")
+        span = self.otel.get_span_named("generate_content [gemini-2.0-flash]")
         self.assertEqual(span.attributes["gen_ai.usage.input_tokens"], 123)
         self.assertEqual(span.attributes["gen_ai.usage.output_tokens"], 456)
 

instrumentation-genai/opentelemetry-instrumentation-google-genai/tests/generate_content/test_sync_nonstreaming.py

Lines changed: 1 addition & 1 deletion
@@ -25,7 +25,7 @@ def generate_content(self, *args, **kwargs):
         return self.client.models.generate_content(*args, **kwargs)
 
     @property
-    def expected_span_name(self):
+    def expected_function_name(self):
         return "google.genai.Models.generate_content"
 
 

instrumentation-genai/opentelemetry-instrumentation-google-genai/tools/typecheck.sh

Lines changed: 37 additions & 0 deletions
@@ -0,0 +1,37 @@
+#!/bin/bash
+
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+SCRIPT_DIR=$(cd $(dirname "${BASH_SOURCE:-$0}"); pwd)
+PROJECT_DIR=$(readlink -f "${SCRIPT_DIR}/..")
+TESTS_DIR="${PROJECT_DIR}/tests"
+REQUIREMENTS_FILE="${TESTS_DIR}/requirements.txt"
+TYPECHECK_ENV="${PROJECT_DIR}/.test/.typecheck-venv"
+
+function main() {
+    if [ ! -d "${TYPECHECK_ENV}" ] ; then
+        mkdir -p "${TYPECHECK_ENV}" || exit 1
+    fi
+    if [ ! -e "${TYPECHECK_ENV}/bin/activate" ] ; then
+        python3 -m venv "${TYPECHECK_ENV}" || exit 1
+    fi
+    source "${TYPECHECK_ENV}/bin/activate" || exit 1
+    pip install pyright || exit 1
+    pip install -r "${REQUIREMENTS_FILE}" || exit 1
+    cd "${PROJECT_DIR}" || exit 1
+    pyright --venvpath "${TYPECHECK_ENV}" || exit $?
+}
+
+main
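As wired up in the Makefile change above, this script is invoked by make typecheck (and by make all): it provisions a virtual environment under .test/.typecheck-venv, installs pyright together with the test requirements, and runs pyright from the project root so that the [tool.pyright] settings in pyproject.toml apply.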
