Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
# limitations under the License.

import logging
import math
import random
from dataclasses import replace
from time import time_ns
Expand Down Expand Up @@ -212,11 +213,7 @@ def _get_metric_descriptor(
elif isinstance(data, Histogram):
descriptor.metric_kind = MetricDescriptor.MetricKind.CUMULATIVE
elif isinstance(data, ExponentialHistogram):
logger.warning(
"Unsupported metric data type %s, ignoring it",
type(data).__name__,
)
return None
descriptor.metric_kind = MetricDescriptor.MetricKind.CUMULATIVE
else:
# Exhaustive check
_: NoReturn = data
Expand All @@ -235,6 +232,8 @@ def _get_metric_descriptor(
)
elif isinstance(first_point, HistogramDataPoint):
descriptor.value_type = MetricDescriptor.ValueType.DISTRIBUTION
elif isinstance(first_point, ExponentialHistogramDataPoint):
descriptor.value_type = MetricDescriptor.ValueType.DISTRIBUTION
elif first_point is None:
pass
else:
Expand Down Expand Up @@ -265,7 +264,9 @@ def _get_metric_descriptor(
@staticmethod
def _to_point(
kind: "MetricDescriptor.MetricKind.V",
data_point: Union[NumberDataPoint, HistogramDataPoint],
data_point: Union[
NumberDataPoint, HistogramDataPoint, ExponentialHistogramDataPoint
],
) -> Point:
if isinstance(data_point, HistogramDataPoint):
mean = (
Expand All @@ -283,6 +284,55 @@ def _to_point(
),
)
)
elif isinstance(data_point, ExponentialHistogramDataPoint):
# Adapted from https://github.com/GoogleCloudPlatform/opentelemetry-operations-go/blob/v1.8.0/exporter/collector/metrics.go#L582

# Calculate underflow bucket (zero count + negative buckets)
underflow = data_point.zero_count
if data_point.negative.bucket_counts:
underflow += sum(data_point.negative.bucket_counts)

# Create bucket counts array: [underflow, positive_buckets..., overflow=0]
bucket_counts = [underflow]
if data_point.positive.bucket_counts:
bucket_counts.extend(data_point.positive.bucket_counts)
bucket_counts.append(0) # overflow bucket is always empty

# Determine bucket options
if not data_point.positive.bucket_counts:
# If no positive buckets, use explicit buckets with bounds=[0]
bucket_options = Distribution.BucketOptions(
explicit_buckets=Distribution.BucketOptions.Explicit(
bounds=[0.0],
)
)
else:
# Use exponential bucket options
# growth_factor = 2^(2^(-scale))
growth_factor = math.pow(2, math.pow(2, -data_point.scale))
# scale = growth_factor^(positive_bucket_offset)
scale = math.pow(growth_factor, data_point.positive.offset)
num_finite_buckets = len(bucket_counts) - 2

bucket_options = Distribution.BucketOptions(
exponential_buckets=Distribution.BucketOptions.Exponential(
num_finite_buckets=num_finite_buckets,
growth_factor=growth_factor,
scale=scale,
)
)

mean = (
data_point.sum / data_point.count if data_point.count else 0.0
)
point_value = TypedValue(
distribution_value=Distribution(
count=data_point.count,
mean=mean,
bucket_counts=bucket_counts,
bucket_options=bucket_options,
)
)
else:
if isinstance(data_point.value, int):
point_value = TypedValue(int64_value=data_point.value)
Expand Down Expand Up @@ -350,10 +400,6 @@ def export(
continue

for data_point in metric.data.data_points:
if isinstance(
data_point, ExponentialHistogramDataPoint
):
continue
labels = {
_normalize_label_key(key): str(value)
for key, value in (
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,238 @@
{
"/google.monitoring.v3.MetricService/CreateMetricDescriptor": [
{
"metricDescriptor": {
"description": "foo",
"displayName": "myexponentialhistogram",
"labels": [
{
"key": "string"
},
{
"key": "int"
},
{
"key": "float"
}
],
"metricKind": "CUMULATIVE",
"type": "workload.googleapis.com/myexponentialhistogram",
"unit": "{myunit}",
"valueType": "DISTRIBUTION"
},
"name": "projects/fakeproject"
}
],
"/google.monitoring.v3.MetricService/CreateTimeSeries": [
{
"name": "projects/fakeproject",
"timeSeries": [
{
"metric": {
"labels": {
"float": "123.4",
"int": "123",
"string": "string"
},
"type": "workload.googleapis.com/myexponentialhistogram"
},
"metricKind": "CUMULATIVE",
"points": [
{
"interval": {
"endTime": "str",
"startTime": "str"
},
"value": {
"distributionValue": {
"bucketCounts": [
"0",
"1",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"1",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"1",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"1",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"1",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"1",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"1",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0"
],
"bucketOptions": {
"exponentialBuckets": {
"growthFactor": 1.0218971486541166,
"numFiniteBuckets": 160,
"scale": 24.67537320652687
}
},
"count": "7",
"mean": 128.57142857142858
}
}
}
],
"resource": {
"labels": {
"location": "global",
"namespace": "",
"node_id": ""
},
"type": "generic_node"
},
"unit": "{myunit}"
}
]
}
]
}
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@
from opentelemetry.metrics import CallbackOptions, Observation
from opentelemetry.sdk.metrics.view import (
ExplicitBucketHistogramAggregation,
ExponentialBucketHistogramAggregation,
View,
)
from opentelemetry.sdk.resources import Resource
Expand Down Expand Up @@ -140,6 +141,32 @@ def test_histogram_single_bucket(
assert gcmfake.get_calls() == snapshot_gcmcalls


def test_exponential_histogram(
    gcmfake_meter_provider: GcmFakeMeterProvider,
    gcmfake: GcmFake,
    snapshot_gcmcalls,
) -> None:
    """Exponential-histogram points are exported and match the GCM snapshot.

    Configures a view that aggregates the instrument with
    ExponentialBucketHistogramAggregation, records a handful of values, flushes,
    and compares every fake-GCM API call against the recorded snapshot.
    """
    exponential_view = View(
        instrument_name="myexponentialhistogram",
        aggregation=ExponentialBucketHistogramAggregation(
            max_size=160, max_scale=20
        ),
    )
    provider = gcmfake_meter_provider(views=[exponential_view])

    histogram = provider.get_meter(__name__).create_histogram(
        "myexponentialhistogram", description="foo", unit="{myunit}"
    )

    # Record a spread of values so multiple exponential buckets are populated.
    for measurement in (100, 50, 200, 25, 300, 75, 150):
        histogram.record(measurement, LABELS)

    provider.force_flush()
    assert gcmfake.get_calls() == snapshot_gcmcalls


@pytest.mark.parametrize(
"value", [pytest.param(123, id="int"), pytest.param(45.6, id="float")]
)
Expand Down