Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## Unreleased

- Implement uninstrument for `opentelemetry-instrumentation-vertexai`
([#3328](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/3328))

## Version 2.0b0 (2025-02-24)

- Added Vertex AI spans for request parameters
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@

from opentelemetry._events import get_event_logger
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
from opentelemetry.instrumentation.utils import unwrap
from opentelemetry.instrumentation.vertexai.package import _instruments
from opentelemetry.instrumentation.vertexai.patch import (
generate_content_create,
Expand All @@ -56,6 +57,23 @@
from opentelemetry.trace import get_tracer


def _client_classes():
    """Return the (v1, v1beta1) ``PredictionServiceClient`` classes to patch.

    The ``google.cloud.aiplatform`` imports are very slow, so they are
    deferred to the first call in case ``instrument()`` is never invoked.
    """
    # pylint: disable=import-outside-toplevel
    from google.cloud.aiplatform_v1.services.prediction_service import (
        client as client_v1,
    )
    from google.cloud.aiplatform_v1beta1.services.prediction_service import (
        client as client_v1beta1,
    )

    return (
        client_v1.PredictionServiceClient,
        client_v1beta1.PredictionServiceClient,
    )


class VertexAIInstrumentor(BaseInstrumentor):
def instrumentation_dependencies(self) -> Collection[str]:
    """Return the package requirement specs this instrumentation supports."""
    return _instruments
Expand All @@ -77,20 +95,15 @@ def _instrument(self, **kwargs: Any):
event_logger_provider=event_logger_provider,
)

wrap_function_wrapper(
module="google.cloud.aiplatform_v1beta1.services.prediction_service.client",
name="PredictionServiceClient.generate_content",
wrapper=generate_content_create(
tracer, event_logger, is_content_enabled()
),
)
wrap_function_wrapper(
module="google.cloud.aiplatform_v1.services.prediction_service.client",
name="PredictionServiceClient.generate_content",
wrapper=generate_content_create(
tracer, event_logger, is_content_enabled()
),
)
for client_class in _client_classes():
wrap_function_wrapper(
client_class,
name="generate_content",
wrapper=generate_content_create(
tracer, event_logger, is_content_enabled()
),
)

def _uninstrument(self, **kwargs: Any) -> None:
    """Undo ``_instrument``: restore the original ``generate_content`` methods.

    The stale ``TODO`` docstring is removed — uninstrumentation is now
    implemented. ``unwrap`` strips the wrapt wrapper installed by
    ``_instrument`` from each prediction-service client class.
    """
    for client_class in _client_classes():
        unwrap(client_class, "generate_content")
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,8 @@ def instrument_no_content(

yield instrumentor
os.environ.pop(OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT, None)
instrumentor.uninstrument()
if instrumentor.is_instrumented_by_opentelemetry:
instrumentor.uninstrument()


@pytest.fixture
Expand All @@ -130,7 +131,8 @@ def instrument_with_content(

yield instrumentor
os.environ.pop(OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT, None)
instrumentor.uninstrument()
if instrumentor.is_instrumented_by_opentelemetry:
instrumentor.uninstrument()


@pytest.fixture(scope="module")
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import annotations

import pytest
from google.cloud.aiplatform_v1.services.prediction_service import client
from google.cloud.aiplatform_v1beta1.services.prediction_service import (
client as client_v1beta1,
)

from opentelemetry.instrumentation.vertexai import VertexAIInstrumentor


# Parametrized over both API versions so every test using ``client_class``
# runs once against the v1 client and once against the v1beta1 client.
@pytest.fixture(
    name="client_class",
    params=[
        pytest.param(client.PredictionServiceClient, id="v1"),
        pytest.param(client_v1beta1.PredictionServiceClient, id="v1beta1"),
    ],
)
def fixture_client_class(request: pytest.FixtureRequest):
    """Yield each ``PredictionServiceClient`` class (v1, then v1beta1)."""
    return request.param


def test_instruments(
    instrument_with_content: VertexAIInstrumentor, client_class
):
    """An instrumented client class carries the wrapt ``__wrapped__`` marker."""
    generate_content = client_class.generate_content
    assert hasattr(generate_content, "__wrapped__")


def test_uninstruments(
    instrument_with_content: VertexAIInstrumentor, client_class
):
    """``uninstrument()`` removes the wrapt wrapper from ``generate_content``."""
    instrument_with_content.uninstrument()
    generate_content = client_class.generate_content
    assert not hasattr(generate_content, "__wrapped__")