From 6dde48c77738070e1f8ec3b6afdad3158677821d Mon Sep 17 00:00:00 2001 From: Dylan Russell Date: Tue, 4 Feb 2025 21:14:31 +0000 Subject: [PATCH 1/7] Add space to logging exporter README. Make minor update to releasing.md. --- docs/releasing.md | 13 ++----------- opentelemetry-exporter-gcp-logging/README.rst | 1 + 2 files changed, 3 insertions(+), 11 deletions(-) diff --git a/docs/releasing.md b/docs/releasing.md index aa6a9f46..7d4bda63 100644 --- a/docs/releasing.md +++ b/docs/releasing.md @@ -89,17 +89,8 @@ with: **pointing at the first commit (a)** that was merged into main. For the example PR listed above, that creates release [`v0.11b0@4ad9ccd`](https://github.com/GoogleCloudPlatform/opentelemetry-operations-python/releases/tag/v0.11b0). -- In description, paste a changelog for the packages. I used this (probably -buggy) small script for the example PR's tag: - - ```bash - for cl in opentelemetry-*/CHANGELOG.md; do - if cl_entries=`pcregrep -M -o1 "^## Version 0\.10b0$\n\n^Released.*\n\n((?:- [\s\S]+?)*?)(?=(\s+##|\Z))" $cl` - then - echo -e "# `dirname $cl`\n$cl_entries" - fi - done - ``` + +Click the "Generate release notes" button in the UI to get an autogenerated changelog for the packages. Once the release tag is created, move the `stable` tag to point to the same commit. diff --git a/opentelemetry-exporter-gcp-logging/README.rst b/opentelemetry-exporter-gcp-logging/README.rst index 9fa6a4d9..89fa70be 100644 --- a/opentelemetry-exporter-gcp-logging/README.rst +++ b/opentelemetry-exporter-gcp-logging/README.rst @@ -35,6 +35,7 @@ Usage ----- .. code:: python + import logging from opentelemetry.exporter.cloud_logging import ( CloudLoggingExporter, From e67ef590d8f96c807c4a6f56857da9e0a19363a9 Mon Sep 17 00:00:00 2001 From: Dylan Russell Date: Wed, 12 Mar 2025 17:58:08 +0000 Subject: [PATCH 2/7] Update code examples for how to use the GoogleCloudResourceDetector. Delete the old deprecated resource detector in __init__.py, and replace it with the new resource detector from _detector.py. Add an entry point to setup.cfg so that the GCP resource detector works with auto instrumentation. --- opentelemetry-resourcedetector-gcp/README.rst | 17 +- opentelemetry-resourcedetector-gcp/setup.cfg | 4 + .../gcp_resource_detector/__init__.py | 266 +++---- .../gcp_resource_detector/_detector.py | 140 ---- ...r.ambr => test_gcp_resource_detector.ambr} | 0 .../tests/test_detector.py | 199 ----- .../tests/test_gcp_resource_detector.py | 677 +++++------------- .../setup_opentelemetry.py | 11 +- 8 files changed, 289 insertions(+), 1025 deletions(-) delete mode 100644 opentelemetry-resourcedetector-gcp/src/opentelemetry/resourcedetector/gcp_resource_detector/_detector.py rename opentelemetry-resourcedetector-gcp/tests/__snapshots__/{test_detector.ambr => test_gcp_resource_detector.ambr} (100%) delete mode 100644 opentelemetry-resourcedetector-gcp/tests/test_detector.py diff --git a/opentelemetry-resourcedetector-gcp/README.rst b/opentelemetry-resourcedetector-gcp/README.rst index a4a1ed8e..fe1bcb3c 100644 --- a/opentelemetry-resourcedetector-gcp/README.rst +++ b/opentelemetry-resourcedetector-gcp/README.rst @@ -25,8 +25,23 @@ Installation pip install opentelemetry-resourcedetector-gcp .. - TODO: Add usage info here +Usage +------------ + +.. 
code:: python
+
+    from opentelemetry.resourcedetector.gcp_resource_detector import GoogleCloudResourceDetector
+    from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
+    from opentelemetry.sdk.trace import TracerProvider
+    from opentelemetry.sdk.trace.export import BatchSpanProcessor
+    from opentelemetry import trace
+
+    resource = GoogleCloudResourceDetector().detect()
+    traceProvider = TracerProvider(resource=resource)
+    processor = BatchSpanProcessor(OTLPSpanExporter())
+    traceProvider.add_span_processor(processor)
+    trace.set_tracer_provider(traceProvider)
+
 ..
 References
 ----------
diff --git a/opentelemetry-resourcedetector-gcp/setup.cfg b/opentelemetry-resourcedetector-gcp/setup.cfg
index 47b004de..6811700e 100644
--- a/opentelemetry-resourcedetector-gcp/setup.cfg
+++ b/opentelemetry-resourcedetector-gcp/setup.cfg
@@ -39,3 +39,7 @@ where = src
 
 [options.extras_require]
 test =
+
+[options.entry_points]
+opentelemetry_resource_detector =
+    gcp_resource_detector = opentelemetry.resourcedetector.gcp_resource_detector:GoogleCloudResourceDetector
\ No newline at end of file
diff --git a/opentelemetry-resourcedetector-gcp/src/opentelemetry/resourcedetector/gcp_resource_detector/__init__.py b/opentelemetry-resourcedetector-gcp/src/opentelemetry/resourcedetector/gcp_resource_detector/__init__.py
index 6dd03a5d..fcd2631c 100644
--- a/opentelemetry-resourcedetector-gcp/src/opentelemetry/resourcedetector/gcp_resource_detector/__init__.py
+++ b/opentelemetry-resourcedetector-gcp/src/opentelemetry/resourcedetector/gcp_resource_detector/__init__.py
@@ -1,4 +1,4 @@
-# Copyright 2021 Google LLC
+# Copyright 2025 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,195 +12,129 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import logging -import os +from typing import Mapping -import requests -from opentelemetry.context import attach, detach, set_value +from opentelemetry.resourcedetector.gcp_resource_detector import ( + _faas, + _gae, + _gce, + _gke, + _metadata, +) +from opentelemetry.resourcedetector.gcp_resource_detector._constants import ( + ResourceAttributes, +) from opentelemetry.sdk.resources import Resource, ResourceDetector +from opentelemetry.util.types import AttributeValue -_GCP_METADATA_URL = ( - "http://metadata.google.internal/computeMetadata/v1/?recursive=true" -) -_GCP_METADATA_URL_HEADER = {"Metadata-Flavor": "Google"} -_TIMEOUT_SEC = 5 - -logger = logging.getLogger(__name__) - - -def _get_google_metadata_and_common_attributes(): - token = attach(set_value("suppress_instrumentation", True)) - all_metadata = requests.get( - _GCP_METADATA_URL, - headers=_GCP_METADATA_URL_HEADER, - timeout=_TIMEOUT_SEC, - ).json() - detach(token) - common_attributes = { - "cloud.account.id": all_metadata["project"]["projectId"], - "cloud.provider": "gcp", - "cloud.zone": all_metadata["instance"]["zone"].split("/")[-1], - } - return common_attributes, all_metadata - - -def get_gce_resources(): - """Resource finder for common GCE attributes - - See: https://cloud.google.com/compute/docs/storing-retrieving-metadata - """ - ( - common_attributes, - all_metadata, - ) = _get_google_metadata_and_common_attributes() - common_attributes.update( + +class GoogleCloudResourceDetector(ResourceDetector): + def detect(self) -> Resource: + # pylint: disable=too-many-return-statements + if not _metadata.is_available(): + return Resource.get_empty() + + if _gke.on_gke(): + return _gke_resource() + if _faas.on_cloud_functions(): + return _cloud_functions_resource() + if _faas.on_cloud_run(): + return _cloud_run_resource() + if _gae.on_app_engine(): + return _gae_resource() + if _gce.on_gce(): + return _gce_resource() + + return Resource.get_empty() + + +def _gke_resource() -> Resource: + zone_or_region = _gke.availability_zone_or_region() + zone_or_region_key = ( + ResourceAttributes.CLOUD_AVAILABILITY_ZONE + if zone_or_region.type == "zone" + else ResourceAttributes.CLOUD_REGION + ) + return _make_resource( { - "host.id": all_metadata["instance"]["id"], - "gcp.resource_type": "gce_instance", + ResourceAttributes.CLOUD_PLATFORM_KEY: ResourceAttributes.GCP_KUBERNETES_ENGINE, + zone_or_region_key: zone_or_region.value, + ResourceAttributes.K8S_CLUSTER_NAME: _gke.cluster_name(), + ResourceAttributes.HOST_ID: _gke.host_id(), } ) - return common_attributes - -def get_gke_resources(): - """Resource finder for GKE attributes""" - if os.getenv("KUBERNETES_SERVICE_HOST") is None: - return {} - - ( - common_attributes, - all_metadata, - ) = _get_google_metadata_and_common_attributes() - - container_name = os.getenv("CONTAINER_NAME") - if container_name is not None: - common_attributes["container.name"] = container_name - - # Fallback to reading namespace from a file is the env var is not set - pod_namespace = os.getenv("NAMESPACE") - if pod_namespace is None: - try: - with open( - "/var/run/secrets/kubernetes.io/serviceaccount/namespace", "r" - ) as namespace_file: - pod_namespace = namespace_file.read().strip() - except FileNotFoundError: - pod_namespace = "" - - common_attributes.update( +def _gce_resource() -> Resource: + zone_and_region = _gce.availability_zone_and_region() + return _make_resource( { - "k8s.cluster.name": all_metadata["instance"]["attributes"][ - "cluster-name" - ], - "k8s.namespace.name": pod_namespace, - 
"k8s.pod.name": os.getenv("POD_NAME", os.getenv("HOSTNAME", "")), - "host.id": all_metadata["instance"]["id"], - "gcp.resource_type": "gke_container", + ResourceAttributes.CLOUD_PLATFORM_KEY: ResourceAttributes.GCP_COMPUTE_ENGINE, + ResourceAttributes.CLOUD_AVAILABILITY_ZONE: zone_and_region.zone, + ResourceAttributes.CLOUD_REGION: zone_and_region.region, + ResourceAttributes.HOST_TYPE: _gce.host_type(), + ResourceAttributes.HOST_ID: _gce.host_id(), + ResourceAttributes.HOST_NAME: _gce.host_name(), } ) - return common_attributes - - -def get_cloudrun_resources(): - """Resource finder for Cloud Run attributes""" - - if os.getenv("K_CONFIGURATION") is None: - return {} - ( - common_attributes, - all_metadata, - ) = _get_google_metadata_and_common_attributes() - faas_name = os.getenv("K_SERVICE") - if faas_name is not None: - common_attributes["faas.name"] = str(faas_name) - - faas_version = os.getenv("K_REVISION") - if faas_version is not None: - common_attributes["faas.version"] = str(faas_version) - - common_attributes.update( +def _cloud_run_resource() -> Resource: + return _make_resource( { - "cloud.platform": "gcp_cloud_run", - "cloud.region": all_metadata["instance"]["region"].split("/")[-1], - "faas.instance": all_metadata["instance"]["id"], - "gcp.resource_type": "cloud_run", + ResourceAttributes.CLOUD_PLATFORM_KEY: ResourceAttributes.GCP_CLOUD_RUN, + ResourceAttributes.FAAS_NAME: _faas.faas_name(), + ResourceAttributes.FAAS_VERSION: _faas.faas_version(), + ResourceAttributes.FAAS_INSTANCE: _faas.faas_instance(), + ResourceAttributes.CLOUD_REGION: _faas.faas_cloud_region(), } ) - return common_attributes - - -def get_cloudfunctions_resources(): - """Resource finder for Cloud Functions attributes""" - - if os.getenv("FUNCTION_TARGET") is None: - return {} - - ( - common_attributes, - all_metadata, - ) = _get_google_metadata_and_common_attributes() - - faas_name = os.getenv("K_SERVICE") - if faas_name is not None: - common_attributes["faas.name"] = str(faas_name) - faas_version = os.getenv("K_REVISION") - if faas_version is not None: - common_attributes["faas.version"] = str(faas_version) - common_attributes.update( +def _cloud_functions_resource() -> Resource: + return _make_resource( { - "cloud.platform": "gcp_cloud_functions", - "cloud.region": all_metadata["instance"]["region"].split("/")[-1], - "faas.instance": all_metadata["instance"]["id"], - "gcp.resource_type": "cloud_functions", + ResourceAttributes.CLOUD_PLATFORM_KEY: ResourceAttributes.GCP_CLOUD_FUNCTIONS, + ResourceAttributes.FAAS_NAME: _faas.faas_name(), + ResourceAttributes.FAAS_VERSION: _faas.faas_version(), + ResourceAttributes.FAAS_INSTANCE: _faas.faas_instance(), + ResourceAttributes.CLOUD_REGION: _faas.faas_cloud_region(), } ) - return common_attributes -# Order here matters. Since a GKE_CONTAINER is a specialized type of GCE_INSTANCE -# We need to first check if it matches the criteria for being a GKE_CONTAINER -# before falling back and checking if its a GCE_INSTANCE. -# This list should be sorted from most specialized to least specialized. 
-_RESOURCE_FINDERS = [ - ("gke_container", get_gke_resources), - ("cloud_run", get_cloudrun_resources), - ("cloud_functions", get_cloudfunctions_resources), - ("gce_instance", get_gce_resources), -] +def _gae_resource() -> Resource: + if _gae.on_app_engine_standard(): + zone = _gae.standard_availability_zone() + region = _gae.standard_cloud_region() + else: + zone_and_region = _gae.flex_availability_zone_and_region() + zone = zone_and_region.zone + region = zone_and_region.region + faas_name = _gae.service_name() + faas_version = _gae.service_version() + faas_instance = _gae.service_instance() -class NoGoogleResourcesFound(Exception): - pass + return _make_resource( + { + ResourceAttributes.CLOUD_PLATFORM_KEY: ResourceAttributes.GCP_APP_ENGINE, + ResourceAttributes.FAAS_NAME: faas_name, + ResourceAttributes.FAAS_VERSION: faas_version, + ResourceAttributes.FAAS_INSTANCE: faas_instance, + ResourceAttributes.CLOUD_AVAILABILITY_ZONE: zone, + ResourceAttributes.CLOUD_REGION: region, + } + ) -class GoogleCloudResourceDetector(ResourceDetector): - def __init__(self, raise_on_error=False): - super().__init__(raise_on_error) - self.cached = False - self.gcp_resources = {} - - def detect(self) -> "Resource": - if not self.cached: - self.cached = True - for resource_type, resource_finder in _RESOURCE_FINDERS: - try: - found_resources = resource_finder() - # pylint: disable=broad-except - except Exception as ex: - logger.warning( - "Exception %s occured attempting %s resource detection", - ex, - resource_type, - ) - found_resources = None - if found_resources: - self.gcp_resources = found_resources - break - if self.raise_on_error and not self.gcp_resources: - raise NoGoogleResourcesFound() - return Resource(self.gcp_resources) +def _make_resource(attrs: Mapping[str, AttributeValue]) -> Resource: + return Resource( + { + ResourceAttributes.CLOUD_PROVIDER: "gcp", + ResourceAttributes.CLOUD_ACCOUNT_ID: _metadata.get_metadata()[ + "project" + ]["projectId"], + **attrs, + } + ) diff --git a/opentelemetry-resourcedetector-gcp/src/opentelemetry/resourcedetector/gcp_resource_detector/_detector.py b/opentelemetry-resourcedetector-gcp/src/opentelemetry/resourcedetector/gcp_resource_detector/_detector.py deleted file mode 100644 index 2fea9514..00000000 --- a/opentelemetry-resourcedetector-gcp/src/opentelemetry/resourcedetector/gcp_resource_detector/_detector.py +++ /dev/null @@ -1,140 +0,0 @@ -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from typing import Mapping - -from opentelemetry.resourcedetector.gcp_resource_detector import ( - _faas, - _gae, - _gce, - _gke, - _metadata, -) -from opentelemetry.resourcedetector.gcp_resource_detector._constants import ( - ResourceAttributes, -) -from opentelemetry.sdk.resources import Resource, ResourceDetector -from opentelemetry.util.types import AttributeValue - - -class GoogleCloudResourceDetector(ResourceDetector): - def detect(self) -> Resource: - # pylint: disable=too-many-return-statements - if not _metadata.is_available(): - return Resource.get_empty() - - if _gke.on_gke(): - return _gke_resource() - if _faas.on_cloud_functions(): - return _cloud_functions_resource() - if _faas.on_cloud_run(): - return _cloud_run_resource() - if _gae.on_app_engine(): - return _gae_resource() - if _gce.on_gce(): - return _gce_resource() - - return Resource.get_empty() - - -def _gke_resource() -> Resource: - zone_or_region = _gke.availability_zone_or_region() - zone_or_region_key = ( - ResourceAttributes.CLOUD_AVAILABILITY_ZONE - if zone_or_region.type == "zone" - else ResourceAttributes.CLOUD_REGION - ) - return _make_resource( - { - ResourceAttributes.CLOUD_PLATFORM_KEY: ResourceAttributes.GCP_KUBERNETES_ENGINE, - zone_or_region_key: zone_or_region.value, - ResourceAttributes.K8S_CLUSTER_NAME: _gke.cluster_name(), - ResourceAttributes.HOST_ID: _gke.host_id(), - } - ) - - -def _gce_resource() -> Resource: - zone_and_region = _gce.availability_zone_and_region() - return _make_resource( - { - ResourceAttributes.CLOUD_PLATFORM_KEY: ResourceAttributes.GCP_COMPUTE_ENGINE, - ResourceAttributes.CLOUD_AVAILABILITY_ZONE: zone_and_region.zone, - ResourceAttributes.CLOUD_REGION: zone_and_region.region, - ResourceAttributes.HOST_TYPE: _gce.host_type(), - ResourceAttributes.HOST_ID: _gce.host_id(), - ResourceAttributes.HOST_NAME: _gce.host_name(), - } - ) - - -def _cloud_run_resource() -> Resource: - return _make_resource( - { - ResourceAttributes.CLOUD_PLATFORM_KEY: ResourceAttributes.GCP_CLOUD_RUN, - ResourceAttributes.FAAS_NAME: _faas.faas_name(), - ResourceAttributes.FAAS_VERSION: _faas.faas_version(), - ResourceAttributes.FAAS_INSTANCE: _faas.faas_instance(), - ResourceAttributes.CLOUD_REGION: _faas.faas_cloud_region(), - } - ) - - -def _cloud_functions_resource() -> Resource: - return _make_resource( - { - ResourceAttributes.CLOUD_PLATFORM_KEY: ResourceAttributes.GCP_CLOUD_FUNCTIONS, - ResourceAttributes.FAAS_NAME: _faas.faas_name(), - ResourceAttributes.FAAS_VERSION: _faas.faas_version(), - ResourceAttributes.FAAS_INSTANCE: _faas.faas_instance(), - ResourceAttributes.CLOUD_REGION: _faas.faas_cloud_region(), - } - ) - - -def _gae_resource() -> Resource: - if _gae.on_app_engine_standard(): - zone = _gae.standard_availability_zone() - region = _gae.standard_cloud_region() - else: - zone_and_region = _gae.flex_availability_zone_and_region() - zone = zone_and_region.zone - region = zone_and_region.region - - faas_name = _gae.service_name() - faas_version = _gae.service_version() - faas_instance = _gae.service_instance() - - return _make_resource( - { - ResourceAttributes.CLOUD_PLATFORM_KEY: ResourceAttributes.GCP_APP_ENGINE, - ResourceAttributes.FAAS_NAME: faas_name, - ResourceAttributes.FAAS_VERSION: faas_version, - ResourceAttributes.FAAS_INSTANCE: faas_instance, - ResourceAttributes.CLOUD_AVAILABILITY_ZONE: zone, - ResourceAttributes.CLOUD_REGION: region, - } - ) - - -def _make_resource(attrs: Mapping[str, AttributeValue]) -> Resource: - return Resource( - { - ResourceAttributes.CLOUD_PROVIDER: 
"gcp", - ResourceAttributes.CLOUD_ACCOUNT_ID: _metadata.get_metadata()[ - "project" - ]["projectId"], - **attrs, - } - ) diff --git a/opentelemetry-resourcedetector-gcp/tests/__snapshots__/test_detector.ambr b/opentelemetry-resourcedetector-gcp/tests/__snapshots__/test_gcp_resource_detector.ambr similarity index 100% rename from opentelemetry-resourcedetector-gcp/tests/__snapshots__/test_detector.ambr rename to opentelemetry-resourcedetector-gcp/tests/__snapshots__/test_gcp_resource_detector.ambr diff --git a/opentelemetry-resourcedetector-gcp/tests/test_detector.py b/opentelemetry-resourcedetector-gcp/tests/test_detector.py deleted file mode 100644 index dfa3ca89..00000000 --- a/opentelemetry-resourcedetector-gcp/tests/test_detector.py +++ /dev/null @@ -1,199 +0,0 @@ -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from unittest.mock import Mock - -import pytest -import requests -from opentelemetry.resourcedetector.gcp_resource_detector import _metadata -from opentelemetry.resourcedetector.gcp_resource_detector._detector import ( - GoogleCloudResourceDetector, -) - - -@pytest.fixture(name="reset_cache") -def fixture_reset_cache(): - yield - _metadata.get_metadata.cache_clear() - _metadata.is_available.cache_clear() - - -@pytest.fixture(name="fake_get") -def fixture_fake_get(monkeypatch: pytest.MonkeyPatch): - mock = Mock() - monkeypatch.setattr(requests, "get", mock) - return mock - - -@pytest.fixture(name="fake_metadata") -def fixture_fake_metadata(fake_get: Mock): - json = {"instance": {}, "project": {}} - fake_get().json.return_value = json - return json - - -# Reset stuff before every test -# pylint: disable=unused-argument -@pytest.fixture(autouse=True) -def autouse(reset_cache, fake_get, fake_metadata): - pass - - -def test_detects_empty_when_not_available(snapshot, fake_get: Mock): - fake_get.side_effect = requests.HTTPError() - assert dict(GoogleCloudResourceDetector().detect().attributes) == snapshot - - -def test_detects_empty_as_fallback(snapshot): - assert dict(GoogleCloudResourceDetector().detect().attributes) == snapshot - - -def test_detects_gce(snapshot, fake_metadata: _metadata.Metadata): - fake_metadata.update( - { - "project": {"projectId": "fakeProject"}, - "instance": { - "name": "fakeName", - "id": "0087244a", - "machineType": "fakeMachineType", - "zone": "projects/233510669999/zones/us-east4-b", - "attributes": {}, - }, - } - ) - - assert dict(GoogleCloudResourceDetector().detect().attributes) == snapshot - - -@pytest.mark.parametrize( - "cluster_location", - ( - pytest.param("us-east4", id="regional"), - pytest.param("us-east4-b", id="zonal"), - ), -) -def test_detects_gke( - cluster_location: str, - snapshot, - fake_metadata: _metadata.Metadata, - monkeypatch: pytest.MonkeyPatch, -): - monkeypatch.setenv("KUBERNETES_SERVICE_HOST", "fakehost") - fake_metadata.update( - { - "project": {"projectId": "fakeProject"}, - "instance": { - "name": "fakeName", - "id": 12345, - "machineType": "fakeMachineType", - "zone": 
"projects/233510669999/zones/us-east4-b", - # Plus some attributes - "attributes": { - "cluster-name": "fakeClusterName", - "cluster-location": cluster_location, - }, - }, - } - ) - - assert dict(GoogleCloudResourceDetector().detect().attributes) == snapshot - - -def test_detects_cloud_run( - snapshot, - fake_metadata: _metadata.Metadata, - monkeypatch: pytest.MonkeyPatch, -): - monkeypatch.setenv("K_CONFIGURATION", "fake-configuration") - monkeypatch.setenv("K_SERVICE", "fake-service") - monkeypatch.setenv("K_REVISION", "fake-revision") - fake_metadata.update( - { - "project": {"projectId": "fakeProject"}, - "instance": { - # this will not be numeric on FaaS - "id": "0087244a", - "region": "projects/233510669999/regions/us-east4", - }, - } - ) - - assert dict(GoogleCloudResourceDetector().detect().attributes) == snapshot - - -def test_detects_cloud_functions( - snapshot, - fake_metadata: _metadata.Metadata, - monkeypatch: pytest.MonkeyPatch, -): - monkeypatch.setenv("FUNCTION_TARGET", "fake-function-target") - # Note all K_* environment variables are set since Cloud Functions executes within Cloud - # Run. This tests that the detector can differentiate between them - monkeypatch.setenv("K_CONFIGURATION", "fake-configuration") - monkeypatch.setenv("K_SERVICE", "fake-service") - monkeypatch.setenv("K_REVISION", "fake-revision") - fake_metadata.update( - { - "project": {"projectId": "fakeProject"}, - "instance": { - # this will not be numeric on FaaS - "id": "0087244a", - "region": "projects/233510669999/regions/us-east4", - }, - } - ) - - assert dict(GoogleCloudResourceDetector().detect().attributes) == snapshot - - -def test_detects_gae_standard( - snapshot, - fake_metadata: _metadata.Metadata, - monkeypatch: pytest.MonkeyPatch, -): - monkeypatch.setenv("GAE_ENV", "standard") - monkeypatch.setenv("GAE_SERVICE", "fake-service") - monkeypatch.setenv("GAE_VERSION", "fake-version") - monkeypatch.setenv("GAE_INSTANCE", "fake-instance") - fake_metadata.update( - { - "project": {"projectId": "fakeProject"}, - "instance": { - "region": "projects/233510669999/regions/us-east4", - "zone": "us-east4-b", - }, - } - ) - - assert dict(GoogleCloudResourceDetector().detect().attributes) == snapshot - - -def test_detects_gae_flex( - snapshot, - fake_metadata: _metadata.Metadata, - monkeypatch: pytest.MonkeyPatch, -): - monkeypatch.setenv("GAE_SERVICE", "fake-service") - monkeypatch.setenv("GAE_VERSION", "fake-version") - monkeypatch.setenv("GAE_INSTANCE", "fake-instance") - fake_metadata.update( - { - "project": {"projectId": "fakeProject"}, - "instance": { - "zone": "projects/233510669999/zones/us-east4-b", - }, - } - ) - - assert dict(GoogleCloudResourceDetector().detect().attributes) == snapshot diff --git a/opentelemetry-resourcedetector-gcp/tests/test_gcp_resource_detector.py b/opentelemetry-resourcedetector-gcp/tests/test_gcp_resource_detector.py index 402828fa..d1ef14cb 100644 --- a/opentelemetry-resourcedetector-gcp/tests/test_gcp_resource_detector.py +++ b/opentelemetry-resourcedetector-gcp/tests/test_gcp_resource_detector.py @@ -1,10 +1,10 @@ -# Copyright 2021 The OpenTelemetry Authors +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,531 +12,188 @@ # See the License for the specific language governing permissions and # limitations under the License. -import os -import unittest -from unittest import mock +from unittest.mock import Mock +import pytest +import requests from opentelemetry.resourcedetector.gcp_resource_detector import ( - _GCP_METADATA_URL, GoogleCloudResourceDetector, - NoGoogleResourcesFound, - get_cloudfunctions_resources, - get_cloudrun_resources, - get_gce_resources, - get_gke_resources, + _metadata, ) -from opentelemetry.sdk.resources import Resource - -NAMESPACE = "NAMESPACE" -CONTAINER_NAME = "CONTAINER_NAME" -HOSTNAME = "HOSTNAME" -POD_NAME = "POD_NAME" -KUBERNETES_SERVICE_HOST = "KUBERNETES_SERVICE_HOST" -K_CONFIGURATION = "K_CONFIGURATION" -FUNCTION_TARGET = "FUNCTION_TARGET" -K_SERVICE = "K_SERVICE" -K_REVISION = "K_REVISION" - -GCE_RESOURCES_JSON_STRING = { - "instance": {"id": "instance_id", "zone": "projects/123/zones/zone"}, - "project": {"projectId": "project_id"}, -} - -GKE_RESOURCES_JSON_STRING = { - "instance": { - "id": "instance_id", - "zone": "projects/123/zones/zone", - "attributes": {"cluster-name": "cluster_name"}, - }, - "project": {"projectId": "project_id"}, -} - -CLOUDRUN_RESOURCES_JSON_STRING = { - "instance": { - "id": "instance_id", - "zone": "projects/123/zones/zone", - "region": "projects/123/regions/region", - }, - "project": {"projectId": "project_id"}, -} - -CLOUDFUNCTIONS_RESOURCES_JSON_STRING = { - "instance": { - "id": "instance_id", - "zone": "projects/123/zones/zone", - "region": "projects/123/regions/region", - }, - "project": {"projectId": "project_id"}, -} - - -@mock.patch( - "opentelemetry.resourcedetector.gcp_resource_detector.requests.get", - **{"return_value.json.return_value": GCE_RESOURCES_JSON_STRING} -) -class TestGCEResourceFinder(unittest.TestCase): - def test_finding_gce_resources(self, getter): - found_resources = get_gce_resources() - self.assertEqual(getter.call_args_list[0][0][0], _GCP_METADATA_URL) - self.assertEqual( - found_resources, - { - "host.id": "instance_id", - "cloud.provider": "gcp", - "cloud.account.id": "project_id", - "cloud.zone": "zone", - "gcp.resource_type": "gce_instance", - }, - ) -def pop_environ_key(key): - if key in os.environ: - os.environ.pop(key) +@pytest.fixture(name="reset_cache") +def fixture_reset_cache(): + yield + _metadata.get_metadata.cache_clear() + _metadata.is_available.cache_clear() -patch_env = mock.patch.dict(os.environ, {}, clear=True) +@pytest.fixture(name="fake_get") +def fixture_fake_get(monkeypatch: pytest.MonkeyPatch): + mock = Mock() + monkeypatch.setattr(requests, "get", mock) + return mock -@patch_env -@mock.patch( - "opentelemetry.resourcedetector.gcp_resource_detector.requests.get", - **{"return_value.json.return_value": GKE_RESOURCES_JSON_STRING} -) -class TestGKEResourceFinder(unittest.TestCase): - - # pylint: disable=unused-argument - def test_not_running_on_gke(self, getter): - pop_environ_key(KUBERNETES_SERVICE_HOST) - found_resources = get_gke_resources() - self.assertEqual(found_resources, {}) - - # pylint: disable=unused-argument - def test_missing_container_name(self, getter): - os.environ[KUBERNETES_SERVICE_HOST] = "10.0.0.1" - pop_environ_key(CONTAINER_NAME) - found_resources = get_gke_resources() - 
self.assertEqual( - found_resources, - { - "cloud.account.id": "project_id", - "k8s.cluster.name": "cluster_name", - "k8s.namespace.name": "", - "host.id": "instance_id", - "k8s.pod.name": "", - "cloud.zone": "zone", - "cloud.provider": "gcp", - "gcp.resource_type": "gke_container", - }, - ) - - # pylint: disable=unused-argument - def test_environment_empty_strings(self, getter): - os.environ[KUBERNETES_SERVICE_HOST] = "10.0.0.1" - os.environ[CONTAINER_NAME] = "" - os.environ[NAMESPACE] = "" - found_resources = get_gke_resources() - self.assertEqual( - found_resources, - { - "cloud.account.id": "project_id", - "k8s.cluster.name": "cluster_name", - "k8s.namespace.name": "", - "host.id": "instance_id", - "k8s.pod.name": "", - "container.name": "", - "cloud.zone": "zone", - "cloud.provider": "gcp", - "gcp.resource_type": "gke_container", - }, - ) - - def test_missing_namespace_file(self, getter): - os.environ[KUBERNETES_SERVICE_HOST] = "10.0.0.1" - os.environ[CONTAINER_NAME] = "container_name" - found_resources = get_gke_resources() - self.assertEqual( - found_resources, - { - "cloud.account.id": "project_id", - "k8s.cluster.name": "cluster_name", - "k8s.namespace.name": "", - "host.id": "instance_id", - "k8s.pod.name": "", - "container.name": "container_name", - "cloud.zone": "zone", - "cloud.provider": "gcp", - "gcp.resource_type": "gke_container", - }, - ) - - def test_finding_gke_resources(self, getter): - os.environ[KUBERNETES_SERVICE_HOST] = "10.0.0.1" - os.environ[NAMESPACE] = "namespace" - os.environ[CONTAINER_NAME] = "container_name" - os.environ[HOSTNAME] = "host_name" - found_resources = get_gke_resources() - self.assertEqual(getter.call_args_list[0][0][0], _GCP_METADATA_URL) - self.assertEqual( - found_resources, - { - "cloud.account.id": "project_id", - "k8s.cluster.name": "cluster_name", - "k8s.namespace.name": "namespace", - "host.id": "instance_id", - "k8s.pod.name": "host_name", - "container.name": "container_name", - "cloud.zone": "zone", - "cloud.provider": "gcp", - "gcp.resource_type": "gke_container", - }, - ) - - def test_finding_gke_resources_with_pod_name(self, getter): - os.environ[KUBERNETES_SERVICE_HOST] = "10.0.0.1" - os.environ[NAMESPACE] = "namespace" - os.environ[CONTAINER_NAME] = "container_name" - os.environ[HOSTNAME] = "host_name" - os.environ[POD_NAME] = "pod_name" - found_resources = get_gke_resources() - self.assertEqual(getter.call_args_list[0][0][0], _GCP_METADATA_URL) - self.assertEqual( - found_resources, - { - "cloud.account.id": "project_id", - "k8s.cluster.name": "cluster_name", - "k8s.namespace.name": "namespace", - "host.id": "instance_id", - "k8s.pod.name": "pod_name", - "container.name": "container_name", - "cloud.zone": "zone", - "cloud.provider": "gcp", - "gcp.resource_type": "gke_container", - }, - ) +@pytest.fixture(name="fake_metadata") +def fixture_fake_metadata(fake_get: Mock): + json = {"instance": {}, "project": {}} + fake_get().json.return_value = json + return json -@patch_env -@mock.patch( - "opentelemetry.resourcedetector.gcp_resource_detector.requests.get", - **{"return_value.json.return_value": CLOUDRUN_RESOURCES_JSON_STRING} -) -class TestCloudRunResourceFinder(unittest.TestCase): - - # pylint: disable=unused-argument - def test_not_running_on_cloudrun(self, getter): - pop_environ_key(K_CONFIGURATION) - found_resources = get_cloudrun_resources() - self.assertEqual(found_resources, {}) - - # pylint: disable=unused-argument - def test_missing_service_name(self, getter): - os.environ[K_CONFIGURATION] = "cloudrun_config" - 
pop_environ_key(K_SERVICE) - pop_environ_key(K_REVISION) - found_resources = get_cloudrun_resources() - self.assertEqual( - found_resources, - { - "cloud.account.id": "project_id", - "cloud.platform": "gcp_cloud_run", - "cloud.region": "region", - "faas.instance": "instance_id", - "cloud.zone": "zone", - "cloud.provider": "gcp", - "gcp.resource_type": "cloud_run", - }, - ) - - # pylint: disable=unused-argument - def test_environment_empty_strings(self, getter): - os.environ[K_CONFIGURATION] = "cloudrun_config" - os.environ[K_SERVICE] = "" - os.environ[K_REVISION] = "" - found_resources = get_cloudrun_resources() - self.assertEqual( - found_resources, - { - "cloud.account.id": "project_id", - "cloud.platform": "gcp_cloud_run", - "cloud.region": "region", - "faas.instance": "instance_id", - "faas.name": "", - "faas.version": "", - "cloud.zone": "zone", - "cloud.provider": "gcp", - "gcp.resource_type": "cloud_run", - }, - ) - - def test_finding_cloudrun_resources(self, getter): - os.environ[K_CONFIGURATION] = "cloudrun_config" - os.environ[K_SERVICE] = "service" - os.environ[K_REVISION] = "revision" - found_resources = get_cloudrun_resources() - self.assertEqual(getter.call_args_list[0][0][0], _GCP_METADATA_URL) - self.assertEqual( - found_resources, - { - "cloud.account.id": "project_id", - "cloud.platform": "gcp_cloud_run", - "cloud.region": "region", - "faas.instance": "instance_id", - "faas.name": "service", - "faas.version": "revision", - "cloud.zone": "zone", - "cloud.provider": "gcp", - "gcp.resource_type": "cloud_run", +# Reset stuff before every test +# pylint: disable=unused-argument +@pytest.fixture(autouse=True) +def autouse(reset_cache, fake_get, fake_metadata): + pass + + +def test_detects_empty_when_not_available(snapshot, fake_get: Mock): + fake_get.side_effect = requests.HTTPError() + assert dict(GoogleCloudResourceDetector().detect().attributes) == snapshot + + +def test_detects_empty_as_fallback(snapshot): + assert dict(GoogleCloudResourceDetector().detect().attributes) == snapshot + + +def test_detects_gce(snapshot, fake_metadata: _metadata.Metadata): + fake_metadata.update( + { + "project": {"projectId": "fakeProject"}, + "instance": { + "name": "fakeName", + "id": "0087244a", + "machineType": "fakeMachineType", + "zone": "projects/233510669999/zones/us-east4-b", + "attributes": {}, }, - ) + } + ) + assert dict(GoogleCloudResourceDetector().detect().attributes) == snapshot -@patch_env -@mock.patch( - "opentelemetry.resourcedetector.gcp_resource_detector.requests.get", - **{"return_value.json.return_value": CLOUDFUNCTIONS_RESOURCES_JSON_STRING} + +@pytest.mark.parametrize( + "cluster_location", + ( + pytest.param("us-east4", id="regional"), + pytest.param("us-east4-b", id="zonal"), + ), ) -class TestCloudFunctionsResourceFinder(unittest.TestCase): - # pylint: disable=unused-argument - def test_not_running_on_cloudfunctions(self, getter): - pop_environ_key(FUNCTION_TARGET) - found_resources = get_cloudfunctions_resources() - self.assertEqual(found_resources, {}) - - # pylint: disable=unused-argument - def test_missing_service_name(self, getter): - os.environ[FUNCTION_TARGET] = "function" - pop_environ_key(K_SERVICE) - pop_environ_key(K_REVISION) - found_resources = get_cloudfunctions_resources() - self.assertEqual( - found_resources, - { - "cloud.account.id": "project_id", - "cloud.platform": "gcp_cloud_functions", - "cloud.region": "region", - "faas.instance": "instance_id", - "cloud.zone": "zone", - "cloud.provider": "gcp", - "gcp.resource_type": "cloud_functions", +def 
test_detects_gke( + cluster_location: str, + snapshot, + fake_metadata: _metadata.Metadata, + monkeypatch: pytest.MonkeyPatch, +): + monkeypatch.setenv("KUBERNETES_SERVICE_HOST", "fakehost") + fake_metadata.update( + { + "project": {"projectId": "fakeProject"}, + "instance": { + "name": "fakeName", + "id": 12345, + "machineType": "fakeMachineType", + "zone": "projects/233510669999/zones/us-east4-b", + # Plus some attributes + "attributes": { + "cluster-name": "fakeClusterName", + "cluster-location": cluster_location, + }, }, - ) - - # pylint: disable=unused-argument - def test_environment_empty_strings(self, getter): - os.environ[FUNCTION_TARGET] = "function" - os.environ[K_SERVICE] = "" - os.environ[K_REVISION] = "" - found_resources = get_cloudfunctions_resources() - self.assertEqual( - found_resources, - { - "cloud.account.id": "project_id", - "cloud.platform": "gcp_cloud_functions", - "cloud.region": "region", - "faas.instance": "instance_id", - "faas.name": "", - "faas.version": "", - "cloud.zone": "zone", - "cloud.provider": "gcp", - "gcp.resource_type": "cloud_functions", + } + ) + + assert dict(GoogleCloudResourceDetector().detect().attributes) == snapshot + + +def test_detects_cloud_run( + snapshot, + fake_metadata: _metadata.Metadata, + monkeypatch: pytest.MonkeyPatch, +): + monkeypatch.setenv("K_CONFIGURATION", "fake-configuration") + monkeypatch.setenv("K_SERVICE", "fake-service") + monkeypatch.setenv("K_REVISION", "fake-revision") + fake_metadata.update( + { + "project": {"projectId": "fakeProject"}, + "instance": { + # this will not be numeric on FaaS + "id": "0087244a", + "region": "projects/233510669999/regions/us-east4", }, - ) - - def test_finding_cloudfunctions_resources(self, getter): - os.environ[FUNCTION_TARGET] = "function" - os.environ[K_SERVICE] = "service" - os.environ[K_REVISION] = "revision" - found_resources = get_cloudfunctions_resources() - self.assertEqual(getter.call_args_list[0][0][0], _GCP_METADATA_URL) - self.assertEqual( - found_resources, - { - "cloud.account.id": "project_id", - "cloud.platform": "gcp_cloud_functions", - "cloud.region": "region", - "faas.instance": "instance_id", - "faas.name": "service", - "faas.version": "revision", - "cloud.zone": "zone", - "cloud.provider": "gcp", - "gcp.resource_type": "cloud_functions", + } + ) + + assert dict(GoogleCloudResourceDetector().detect().attributes) == snapshot + + +def test_detects_cloud_functions( + snapshot, + fake_metadata: _metadata.Metadata, + monkeypatch: pytest.MonkeyPatch, +): + monkeypatch.setenv("FUNCTION_TARGET", "fake-function-target") + # Note all K_* environment variables are set since Cloud Functions executes within Cloud + # Run. 
This tests that the detector can differentiate between them + monkeypatch.setenv("K_CONFIGURATION", "fake-configuration") + monkeypatch.setenv("K_SERVICE", "fake-service") + monkeypatch.setenv("K_REVISION", "fake-revision") + fake_metadata.update( + { + "project": {"projectId": "fakeProject"}, + "instance": { + # this will not be numeric on FaaS + "id": "0087244a", + "region": "projects/233510669999/regions/us-east4", }, - ) - + } + ) + + assert dict(GoogleCloudResourceDetector().detect().attributes) == snapshot + + +def test_detects_gae_standard( + snapshot, + fake_metadata: _metadata.Metadata, + monkeypatch: pytest.MonkeyPatch, +): + monkeypatch.setenv("GAE_ENV", "standard") + monkeypatch.setenv("GAE_SERVICE", "fake-service") + monkeypatch.setenv("GAE_VERSION", "fake-version") + monkeypatch.setenv("GAE_INSTANCE", "fake-instance") + fake_metadata.update( + { + "project": {"projectId": "fakeProject"}, + "instance": { + "region": "projects/233510669999/regions/us-east4", + "zone": "us-east4-b", + }, + } + ) + + assert dict(GoogleCloudResourceDetector().detect().attributes) == snapshot + + +def test_detects_gae_flex( + snapshot, + fake_metadata: _metadata.Metadata, + monkeypatch: pytest.MonkeyPatch, +): + monkeypatch.setenv("GAE_SERVICE", "fake-service") + monkeypatch.setenv("GAE_VERSION", "fake-version") + monkeypatch.setenv("GAE_INSTANCE", "fake-instance") + fake_metadata.update( + { + "project": {"projectId": "fakeProject"}, + "instance": { + "zone": "projects/233510669999/zones/us-east4-b", + }, + } + ) -@patch_env -@mock.patch( - "opentelemetry.resourcedetector.gcp_resource_detector.requests.get" -) -class TestGoogleCloudResourceDetector(unittest.TestCase): - def test_finding_gce_resources(self, getter): - # The necessary env variables were not set for GKE resource detection - # to succeed. We should be falling back to detecting GCE resources - pop_environ_key(KUBERNETES_SERVICE_HOST) - pop_environ_key(K_CONFIGURATION) - pop_environ_key(FUNCTION_TARGET) - resource_finder = GoogleCloudResourceDetector() - getter.return_value.json.return_value = GCE_RESOURCES_JSON_STRING - found_resources = resource_finder.detect() - self.assertEqual(getter.call_args_list[0][0][0], _GCP_METADATA_URL) - self.assertEqual( - found_resources, - Resource( - attributes={ - "host.id": "instance_id", - "cloud.provider": "gcp", - "cloud.account.id": "project_id", - "cloud.zone": "zone", - "gcp.resource_type": "gce_instance", - } - ), - ) - self.assertEqual(getter.call_count, 1) - - # Found resources should be cached and not require another network call - found_resources = resource_finder.detect() - self.assertEqual(getter.call_count, 1) - self.assertEqual( - found_resources, - Resource( - attributes={ - "host.id": "instance_id", - "cloud.provider": "gcp", - "cloud.account.id": "project_id", - "cloud.zone": "zone", - "gcp.resource_type": "gce_instance", - } - ), - ) - - def test_finding_gke_resources(self, getter): - # The necessary env variables were set for GKE resource detection - # to succeed. 
No GCE resource info should be extracted - os.environ[KUBERNETES_SERVICE_HOST] = "10.0.0.1" - os.environ[NAMESPACE] = "namespace" - os.environ[CONTAINER_NAME] = "container_name" - os.environ[HOSTNAME] = "host_name" - - resource_finder = GoogleCloudResourceDetector() - getter.return_value.json.return_value = GKE_RESOURCES_JSON_STRING - found_resources = resource_finder.detect() - self.assertEqual(getter.call_args_list[0][0][0], _GCP_METADATA_URL) - self.assertEqual( - found_resources, - Resource( - attributes={ - "cloud.account.id": "project_id", - "k8s.cluster.name": "cluster_name", - "k8s.namespace.name": "namespace", - "host.id": "instance_id", - "k8s.pod.name": "host_name", - "container.name": "container_name", - "cloud.zone": "zone", - "cloud.provider": "gcp", - "gcp.resource_type": "gke_container", - } - ), - ) - self.assertEqual(getter.call_count, 1) - - def test_finding_cloudrun_resources(self, getter): - # The necessary env variables were set for CloudRun resource detection - # to succeed. No GCE resource info should be extracted - os.environ[K_CONFIGURATION] = "cloudrun_config" - os.environ[K_SERVICE] = "service" - os.environ[K_REVISION] = "revision" - - resource_finder = GoogleCloudResourceDetector() - getter.return_value.json.return_value = CLOUDRUN_RESOURCES_JSON_STRING - found_resources = resource_finder.detect() - self.assertEqual(getter.call_args_list[0][0][0], _GCP_METADATA_URL) - self.assertEqual( - found_resources, - Resource( - attributes={ - "cloud.account.id": "project_id", - "cloud.platform": "gcp_cloud_run", - "cloud.region": "region", - "faas.instance": "instance_id", - "faas.name": "service", - "faas.version": "revision", - "cloud.zone": "zone", - "cloud.provider": "gcp", - "gcp.resource_type": "cloud_run", - } - ), - ) - self.assertEqual(getter.call_count, 1) - - def test_finding_cloudfunctions_resources(self, getter): - # The necessary env variables were set for Cloudfunctions resource detection - # to succeed. 
No GCE resource info should be extracted - os.environ[FUNCTION_TARGET] = "function" - os.environ[K_SERVICE] = "service" - os.environ[K_REVISION] = "revision" - - resource_finder = GoogleCloudResourceDetector() - getter.return_value.json.return_value = ( - CLOUDFUNCTIONS_RESOURCES_JSON_STRING - ) - found_resources = resource_finder.detect() - self.assertEqual(getter.call_args_list[0][0][0], _GCP_METADATA_URL) - self.assertEqual( - found_resources, - Resource( - attributes={ - "cloud.account.id": "project_id", - "cloud.platform": "gcp_cloud_functions", - "cloud.region": "region", - "faas.instance": "instance_id", - "faas.name": "service", - "faas.version": "revision", - "cloud.zone": "zone", - "cloud.provider": "gcp", - "gcp.resource_type": "cloud_functions", - } - ), - ) - self.assertEqual(getter.call_count, 1) - - def test_resource_finding_fallback(self, getter): - # The environment variables imply its on GKE, but the metadata doesn't - # have GKE information - getter.return_value.json.return_value = GCE_RESOURCES_JSON_STRING - os.environ[CONTAINER_NAME] = "container_name" - - # This detection will cause an error in get_gke_resources and should - # swallow the error and fall back to get_gce_resources - resource_finder = GoogleCloudResourceDetector() - found_resources = resource_finder.detect() - self.assertEqual( - found_resources, - Resource( - attributes={ - "host.id": "instance_id", - "cloud.provider": "gcp", - "cloud.account.id": "project_id", - "cloud.zone": "zone", - "gcp.resource_type": "gce_instance", - } - ), - ) - - def test_no_resources_found(self, getter): - # If no Google resources were found, we throw an exception - getter.return_value.json.side_effect = Exception - - resource_finder = GoogleCloudResourceDetector(raise_on_error=True) - - self.assertRaises(NoGoogleResourcesFound, resource_finder.detect) - - def test_detector_dont_raise_on_error(self, getter): - # If no Google resources were found, we throw an exception - getter.return_value.json.side_effect = Exception - detector = GoogleCloudResourceDetector(raise_on_error=False) - expected_resources = Resource({}) - - resources = detector.detect() - - self.assertEqual(resources, expected_resources) + assert dict(GoogleCloudResourceDetector().detect().attributes) == snapshot diff --git a/samples/instrumentation-quickstart/setup_opentelemetry.py b/samples/instrumentation-quickstart/setup_opentelemetry.py index a6e8d62f..581138d8 100644 --- a/samples/instrumentation-quickstart/setup_opentelemetry.py +++ b/samples/instrumentation-quickstart/setup_opentelemetry.py @@ -12,10 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import os - -from opentelemetry.sdk.resources import SERVICE_INSTANCE_ID, SERVICE_NAME, Resource - from opentelemetry import trace from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter from opentelemetry.sdk.trace import TracerProvider @@ -25,13 +21,10 @@ from opentelemetry.exporter.otlp.proto.http.metric_exporter import OTLPMetricExporter from opentelemetry.sdk.metrics import MeterProvider from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader +from opentelemetry.resourcedetector.gcp_resource_detector import GoogleCloudResourceDetector # [START opentelemetry_instrumentation_setup_opentelemetry] -resource = Resource.create(attributes={ - # Use the PID as the service.instance.id to avoid duplicate timeseries - # from different Gunicorn worker processes. 
- SERVICE_INSTANCE_ID: f"worker-{os.getpid()}", -}) +resource = GoogleCloudResourceDetector().detect() traceProvider = TracerProvider(resource=resource) processor = BatchSpanProcessor(OTLPSpanExporter()) From a4693999f1f3fd5b62dcb2b8f6aba341bd366137 Mon Sep 17 00:00:00 2001 From: Dylan Russell Date: Wed, 2 Apr 2025 16:59:56 +0000 Subject: [PATCH 3/7] Add telemetry endpoint to trace examples. --- samples/otlptrace/README.md | 1 - samples/otlptrace/example_grpc.py | 1 + samples/otlptrace/example_http.py | 1 + 3 files changed, 2 insertions(+), 1 deletion(-) diff --git a/samples/otlptrace/README.md b/samples/otlptrace/README.md index 80eb3411..fe19da65 100644 --- a/samples/otlptrace/README.md +++ b/samples/otlptrace/README.md @@ -24,7 +24,6 @@ gcloud auth application-default login ```sh # export necessary OTEL environment variables export OTEL_RESOURCE_ATTRIBUTES="gcp.project_id=" -export OTEL_EXPORTER_OTLP_ENDPOINT= # from the samples/otlptrace repository python3 example_grpc.py diff --git a/samples/otlptrace/example_grpc.py b/samples/otlptrace/example_grpc.py index 80980f52..8c59f1f9 100644 --- a/samples/otlptrace/example_grpc.py +++ b/samples/otlptrace/example_grpc.py @@ -46,6 +46,7 @@ processor = BatchSpanProcessor( OTLPSpanExporter( credentials=channel_creds, + endpoint="https://telemetry.googleapis.com:443/v1/traces", ) ) trace_provider.add_span_processor(processor) diff --git a/samples/otlptrace/example_http.py b/samples/otlptrace/example_http.py index a613beed..4694cc01 100644 --- a/samples/otlptrace/example_http.py +++ b/samples/otlptrace/example_http.py @@ -31,6 +31,7 @@ processor = BatchSpanProcessor( OTLPSpanExporter( session=AuthorizedSession(credentials), + endpoint="https://telemetry.googleapis.com:443/v1/traces", ) ) trace_provider.add_span_processor(processor) From e52e07b9636dab1febc7a0525af1c0808f392065 Mon Sep 17 00:00:00 2001 From: Dylan Russell Date: Mon, 14 Jul 2025 20:09:58 +0000 Subject: [PATCH 4/7] Remove obsolete comment --- samples/instrumentation-quickstart/setup_opentelemetry.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/samples/instrumentation-quickstart/setup_opentelemetry.py b/samples/instrumentation-quickstart/setup_opentelemetry.py index 09061bd5..bacdbf77 100644 --- a/samples/instrumentation-quickstart/setup_opentelemetry.py +++ b/samples/instrumentation-quickstart/setup_opentelemetry.py @@ -31,13 +31,6 @@ # [START opentelemetry_instrumentation_setup_opentelemetry] def setup_opentelemetry() -> None: - # resource = Resource.create( - # attributes={ - # # Use the PID as the service.instance.id to avoid duplicate timeseries - # # from different Gunicorn worker processes. 
- # SERVICE_INSTANCE_ID: f"worker-{os.getpid()}", - # } - # ) resource = GoogleCloudResourceDetector().detect() # Set up OpenTelemetry Python SDK From 250d26db9b3175089c50f2d353ced4a8c6849139 Mon Sep 17 00:00:00 2001 From: Dylan Russell Date: Mon, 14 Jul 2025 20:29:05 +0000 Subject: [PATCH 5/7] Fix e2e test and more samples to use GoogleCloudResourceDetector --- e2e-test-server/e2e_test_server/scenarios.py | 4 +--- samples/otlpmetric/example.py | 4 +--- samples/otlptrace/example_grpc.py | 4 ++-- samples/otlptrace/example_http.py | 4 ++-- 4 files changed, 6 insertions(+), 10 deletions(-) diff --git a/e2e-test-server/e2e_test_server/scenarios.py b/e2e-test-server/e2e_test_server/scenarios.py index f0b10b82..1ab87d2d 100644 --- a/e2e-test-server/e2e_test_server/scenarios.py +++ b/e2e-test-server/e2e_test_server/scenarios.py @@ -22,9 +22,7 @@ from opentelemetry.propagators.cloud_trace_propagator import ( CloudTraceFormatPropagator, ) -from opentelemetry.resourcedetector.gcp_resource_detector._detector import ( - GoogleCloudResourceDetector, -) +from opentelemetry.resourcedetector.gcp_resource_detector import GoogleCloudResourceDetector from opentelemetry.sdk.trace import TracerProvider from opentelemetry.sdk.trace.export import BatchSpanProcessor from opentelemetry.sdk.trace.sampling import ALWAYS_ON diff --git a/samples/otlpmetric/example.py b/samples/otlpmetric/example.py index d8693a31..a326a623 100644 --- a/samples/otlpmetric/example.py +++ b/samples/otlpmetric/example.py @@ -24,9 +24,7 @@ from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import ( OTLPMetricExporter, ) -from opentelemetry.resourcedetector.gcp_resource_detector._detector import ( - GoogleCloudResourceDetector, -) +from opentelemetry.resourcedetector.gcp_resource_detector import GoogleCloudResourceDetector from opentelemetry.sdk.resources import SERVICE_NAME, Resource, get_aggregated_resources from opentelemetry.sdk.metrics import MeterProvider from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader diff --git a/samples/otlptrace/example_grpc.py b/samples/otlptrace/example_grpc.py index 8c59f1f9..02d94f53 100644 --- a/samples/otlptrace/example_grpc.py +++ b/samples/otlptrace/example_grpc.py @@ -24,9 +24,9 @@ from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import ( OTLPSpanExporter, ) -from opentelemetry.sdk.resources import SERVICE_NAME, Resource from opentelemetry.sdk.trace import TracerProvider from opentelemetry.sdk.trace.export import BatchSpanProcessor +from opentelemetry.resourcedetector.gcp_resource_detector import GoogleCloudResourceDetector """ This is a sample script that exports OTLP traces encoded as protobufs via gRPC. 
@@ -34,7 +34,7 @@ credentials, _ = google.auth.default() request = google.auth.transport.requests.Request() -resource = Resource.create(attributes={SERVICE_NAME: "otlp-gcp-grpc-sample"}) +resource = GoogleCloudResourceDetector().detect() auth_metadata_plugin = AuthMetadataPlugin(credentials=credentials, request=request) channel_creds = grpc.composite_channel_credentials( diff --git a/samples/otlptrace/example_http.py b/samples/otlptrace/example_http.py index 4694cc01..153e1851 100644 --- a/samples/otlptrace/example_http.py +++ b/samples/otlptrace/example_http.py @@ -19,14 +19,14 @@ from opentelemetry.exporter.otlp.proto.http.trace_exporter import ( OTLPSpanExporter, ) +from opentelemetry.resourcedetector.gcp_resource_detector import GoogleCloudResourceDetector from google.auth.transport.requests import AuthorizedSession -from opentelemetry.sdk.resources import SERVICE_NAME, Resource from opentelemetry.sdk.trace import TracerProvider from opentelemetry.sdk.trace.export import BatchSpanProcessor credentials, _ = google.auth.default() trace_provider = TracerProvider( - resource=Resource.create(attributes={SERVICE_NAME: "otlp-gcp-http-sample"}) + resource=GoogleCloudResourceDetector().detect() ) processor = BatchSpanProcessor( OTLPSpanExporter( From c11054cb569ff3284db580efde648b08e81bdd24 Mon Sep 17 00:00:00 2001 From: Dylan Russell Date: Mon, 14 Jul 2025 20:34:06 +0000 Subject: [PATCH 6/7] More sample cleanup --- samples/instrumentation-quickstart/setup_opentelemetry.py | 1 - samples/otlpmetric/example.py | 3 +-- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/samples/instrumentation-quickstart/setup_opentelemetry.py b/samples/instrumentation-quickstart/setup_opentelemetry.py index bacdbf77..e0a415a6 100644 --- a/samples/instrumentation-quickstart/setup_opentelemetry.py +++ b/samples/instrumentation-quickstart/setup_opentelemetry.py @@ -23,7 +23,6 @@ from opentelemetry.sdk._logs.export import BatchLogRecordProcessor from opentelemetry.sdk.metrics import MeterProvider from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader -from opentelemetry.sdk.resources import SERVICE_INSTANCE_ID, Resource from opentelemetry.sdk.trace import TracerProvider from opentelemetry.sdk.trace.export import BatchSpanProcessor diff --git a/samples/otlpmetric/example.py b/samples/otlpmetric/example.py index a326a623..3944f335 100644 --- a/samples/otlpmetric/example.py +++ b/samples/otlpmetric/example.py @@ -20,12 +20,11 @@ import google.auth.transport.requests import grpc from google.auth.transport.grpc import AuthMetadataPlugin -from opentelemetry import metrics from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import ( OTLPMetricExporter, ) from opentelemetry.resourcedetector.gcp_resource_detector import GoogleCloudResourceDetector -from opentelemetry.sdk.resources import SERVICE_NAME, Resource, get_aggregated_resources +from opentelemetry.sdk.resources import get_aggregated_resources from opentelemetry.sdk.metrics import MeterProvider from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader From ce7502426c19bd86e03bd239f36d84dded6fec15 Mon Sep 17 00:00:00 2001 From: Dylan Russell Date: Tue, 15 Jul 2025 15:02:28 +0000 Subject: [PATCH 7/7] Fix samples again --- opentelemetry-resourcedetector-gcp/README.rst | 13 ++++++++++--- .../setup_opentelemetry.py | 12 ++++++++---- samples/otlptrace/example_grpc.py | 4 ++-- samples/otlptrace/example_http.py | 4 ++-- 4 files changed, 22 insertions(+), 11 deletions(-) diff --git 
a/opentelemetry-resourcedetector-gcp/README.rst b/opentelemetry-resourcedetector-gcp/README.rst
index fe1bcb3c..596708c6 100644
--- a/opentelemetry-resourcedetector-gcp/README.rst
+++ b/opentelemetry-resourcedetector-gcp/README.rst
@@ -30,14 +30,23 @@ Usage
 ------------
 
 .. code:: python
 
-    from opentelemetry.resourcedetector.gcp_resource_detector import GoogleCloudResourceDetector
+    import os
+
     from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
     from opentelemetry.sdk.trace import TracerProvider
     from opentelemetry.sdk.trace.export import BatchSpanProcessor
     from opentelemetry import trace
-
-    resource = GoogleCloudResourceDetector().detect()
+    from opentelemetry.sdk.resources import SERVICE_INSTANCE_ID, Resource
+
+    # This will use the GoogleCloudResourceDetector under the covers.
+    resource = Resource.create(
+        attributes={
+            # Use the PID as the service.instance.id to avoid duplicate timeseries
+            # from different Gunicorn worker processes.
+            SERVICE_INSTANCE_ID: f"worker-{os.getpid()}",
+        }
+    )
     traceProvider = TracerProvider(resource=resource)
     processor = BatchSpanProcessor(OTLPSpanExporter())
     traceProvider.add_span_processor(processor)
diff --git a/samples/instrumentation-quickstart/setup_opentelemetry.py b/samples/instrumentation-quickstart/setup_opentelemetry.py
index e0a415a6..55c685c4 100644
--- a/samples/instrumentation-quickstart/setup_opentelemetry.py
+++ b/samples/instrumentation-quickstart/setup_opentelemetry.py
@@ -23,15 +23,19 @@ from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
 from opentelemetry.sdk.metrics import MeterProvider
 from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader
+from opentelemetry.sdk.resources import SERVICE_INSTANCE_ID, Resource
 from opentelemetry.sdk.trace import TracerProvider
 from opentelemetry.sdk.trace.export import BatchSpanProcessor
 
-from opentelemetry.resourcedetector.gcp_resource_detector import GoogleCloudResourceDetector
-
 # [START opentelemetry_instrumentation_setup_opentelemetry]
 def setup_opentelemetry() -> None:
-    resource = GoogleCloudResourceDetector().detect()
-
+    resource = Resource.create(
+        attributes={
+            # Use the PID as the service.instance.id to avoid duplicate timeseries
+            # from different Gunicorn worker processes.
+            SERVICE_INSTANCE_ID: f"worker-{os.getpid()}",
+        }
+    )
     # Set up OpenTelemetry Python SDK
     tracer_provider = TracerProvider(resource=resource)
     tracer_provider.add_span_processor(BatchSpanProcessor(OTLPSpanExporter()))
diff --git a/samples/otlptrace/example_grpc.py b/samples/otlptrace/example_grpc.py
index 02d94f53..8c59f1f9 100644
--- a/samples/otlptrace/example_grpc.py
+++ b/samples/otlptrace/example_grpc.py
@@ -24,9 +24,9 @@ from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import (
     OTLPSpanExporter,
 )
+from opentelemetry.sdk.resources import SERVICE_NAME, Resource
 from opentelemetry.sdk.trace import TracerProvider
 from opentelemetry.sdk.trace.export import BatchSpanProcessor
-from opentelemetry.resourcedetector.gcp_resource_detector import GoogleCloudResourceDetector
 
 """
 This is a sample script that exports OTLP traces encoded as protobufs via gRPC.
@@ -34,7 +34,7 @@ credentials, _ = google.auth.default() request = google.auth.transport.requests.Request() -resource = GoogleCloudResourceDetector().detect() +resource = Resource.create(attributes={SERVICE_NAME: "otlp-gcp-grpc-sample"}) auth_metadata_plugin = AuthMetadataPlugin(credentials=credentials, request=request) channel_creds = grpc.composite_channel_credentials( diff --git a/samples/otlptrace/example_http.py b/samples/otlptrace/example_http.py index 153e1851..4694cc01 100644 --- a/samples/otlptrace/example_http.py +++ b/samples/otlptrace/example_http.py @@ -19,14 +19,14 @@ from opentelemetry.exporter.otlp.proto.http.trace_exporter import ( OTLPSpanExporter, ) -from opentelemetry.resourcedetector.gcp_resource_detector import GoogleCloudResourceDetector from google.auth.transport.requests import AuthorizedSession +from opentelemetry.sdk.resources import SERVICE_NAME, Resource from opentelemetry.sdk.trace import TracerProvider from opentelemetry.sdk.trace.export import BatchSpanProcessor credentials, _ = google.auth.default() trace_provider = TracerProvider( - resource=GoogleCloudResourceDetector().detect() + resource=Resource.create(attributes={SERVICE_NAME: "otlp-gcp-http-sample"}) ) processor = BatchSpanProcessor( OTLPSpanExporter(