
Commit a97a85b

Add support for exponential histograms (#413)
* Add support for exponential histograms
* Fix linting errors
* Move mean calculation to where it's used
1 parent 4c02974 commit a97a85b

3 files changed: +321 −10 lines changed


opentelemetry-exporter-gcp-monitoring/src/opentelemetry/exporter/cloud_monitoring/__init__.py

Lines changed: 56 additions & 10 deletions
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 import logging
+import math
 import random
 from dataclasses import replace
 from time import time_ns
@@ -212,11 +213,7 @@ def _get_metric_descriptor(
         elif isinstance(data, Histogram):
             descriptor.metric_kind = MetricDescriptor.MetricKind.CUMULATIVE
         elif isinstance(data, ExponentialHistogram):
-            logger.warning(
-                "Unsupported metric data type %s, ignoring it",
-                type(data).__name__,
-            )
-            return None
+            descriptor.metric_kind = MetricDescriptor.MetricKind.CUMULATIVE
         else:
             # Exhaustive check
             _: NoReturn = data
@@ -235,6 +232,8 @@ def _get_metric_descriptor(
             )
         elif isinstance(first_point, HistogramDataPoint):
             descriptor.value_type = MetricDescriptor.ValueType.DISTRIBUTION
+        elif isinstance(first_point, ExponentialHistogramDataPoint):
+            descriptor.value_type = MetricDescriptor.ValueType.DISTRIBUTION
         elif first_point is None:
             pass
         else:
@@ -265,7 +264,9 @@ def _get_metric_descriptor(
     @staticmethod
     def _to_point(
         kind: "MetricDescriptor.MetricKind.V",
-        data_point: Union[NumberDataPoint, HistogramDataPoint],
+        data_point: Union[
+            NumberDataPoint, HistogramDataPoint, ExponentialHistogramDataPoint
+        ],
     ) -> Point:
         if isinstance(data_point, HistogramDataPoint):
             mean = (
@@ -283,6 +284,55 @@ def _to_point(
                     ),
                 )
             )
+        elif isinstance(data_point, ExponentialHistogramDataPoint):
+            # Adapted from https://github.com/GoogleCloudPlatform/opentelemetry-operations-go/blob/v1.8.0/exporter/collector/metrics.go#L582
+
+            # Calculate underflow bucket (zero count + negative buckets)
+            underflow = data_point.zero_count
+            if data_point.negative.bucket_counts:
+                underflow += sum(data_point.negative.bucket_counts)
+
+            # Create bucket counts array: [underflow, positive_buckets..., overflow=0]
+            bucket_counts = [underflow]
+            if data_point.positive.bucket_counts:
+                bucket_counts.extend(data_point.positive.bucket_counts)
+            bucket_counts.append(0)  # overflow bucket is always empty
+
+            # Determine bucket options
+            if not data_point.positive.bucket_counts:
+                # If no positive buckets, use explicit buckets with bounds=[0]
+                bucket_options = Distribution.BucketOptions(
+                    explicit_buckets=Distribution.BucketOptions.Explicit(
+                        bounds=[0.0],
+                    )
+                )
+            else:
+                # Use exponential bucket options
+                # growth_factor = 2^(2^(-scale))
+                growth_factor = math.pow(2, math.pow(2, -data_point.scale))
+                # scale = growth_factor^(positive_bucket_offset)
+                scale = math.pow(growth_factor, data_point.positive.offset)
+                num_finite_buckets = len(bucket_counts) - 2
+
+                bucket_options = Distribution.BucketOptions(
+                    exponential_buckets=Distribution.BucketOptions.Exponential(
+                        num_finite_buckets=num_finite_buckets,
+                        growth_factor=growth_factor,
+                        scale=scale,
+                    )
+                )
+
+            mean = (
+                data_point.sum / data_point.count if data_point.count else 0.0
+            )
+            point_value = TypedValue(
+                distribution_value=Distribution(
+                    count=data_point.count,
+                    mean=mean,
+                    bucket_counts=bucket_counts,
+                    bucket_options=bucket_options,
+                )
+            )
         else:
             if isinstance(data_point.value, int):
                 point_value = TypedValue(int64_value=data_point.value)
@@ -350,10 +400,6 @@ def export(
                         continue
 
                     for data_point in metric.data.data_points:
-                        if isinstance(
-                            data_point, ExponentialHistogramDataPoint
-                        ):
-                            continue
                         labels = {
                             _normalize_label_key(key): str(value)
                             for key, value in (
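
For readers following the new ExponentialHistogramDataPoint branch above: the conversion collapses the OTLP zero bucket and all negative buckets into a single underflow bucket, copies the positive buckets verbatim, and appends an always-empty overflow bucket; the Cloud Monitoring "scale" parameter ends up being the lower bound of the first finite bucket. The sketch below restates that arithmetic with plain Python values so it can be run without the Cloud Monitoring protos; ExpPoint and Buckets are stand-ins for the SDK data point, not part of the exporter.

import math
from dataclasses import dataclass, field
from typing import List


@dataclass
class Buckets:
    # Stand-in for the SDK's positive/negative bucket containers
    offset: int = 0
    bucket_counts: List[int] = field(default_factory=list)


@dataclass
class ExpPoint:
    # Stand-in for ExponentialHistogramDataPoint
    count: int
    sum: float
    scale: int
    zero_count: int
    positive: Buckets
    negative: Buckets


def to_distribution_params(point: ExpPoint) -> dict:
    """Mirror of the exporter's exponential-histogram mapping, as plain values."""
    # Underflow bucket absorbs the zero bucket and every negative bucket.
    underflow = point.zero_count + sum(point.negative.bucket_counts)
    # [underflow, positive buckets..., overflow (always 0)]
    bucket_counts = [underflow, *point.positive.bucket_counts, 0]

    # base = 2^(2^-scale); Cloud Monitoring's "scale" is that base raised to the
    # positive-bucket offset, i.e. the left edge of the first finite bucket.
    growth_factor = math.pow(2, math.pow(2, -point.scale))
    return {
        "bucket_counts": bucket_counts,
        "num_finite_buckets": len(bucket_counts) - 2,
        "growth_factor": growth_factor,
        "scale": math.pow(growth_factor, point.positive.offset),
        "mean": point.sum / point.count if point.count else 0.0,
    }


if __name__ == "__main__":
    # One value (42) recorded at OTLP scale 3 (base 2^(1/8)); it lands in
    # positive bucket index 43, i.e. the range (2^(43/8), 2^(44/8)].
    point = ExpPoint(
        count=1, sum=42.0, scale=3, zero_count=0,
        positive=Buckets(offset=43, bucket_counts=[1]),
        negative=Buckets(),
    )
    print(to_distribution_params(point))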
Lines changed: 238 additions & 0 deletions
@@ -0,0 +1,238 @@
{
  "/google.monitoring.v3.MetricService/CreateMetricDescriptor": [
    {
      "metricDescriptor": {
        "description": "foo",
        "displayName": "myexponentialhistogram",
        "labels": [
          {"key": "string"},
          {"key": "int"},
          {"key": "float"}
        ],
        "metricKind": "CUMULATIVE",
        "type": "workload.googleapis.com/myexponentialhistogram",
        "unit": "{myunit}",
        "valueType": "DISTRIBUTION"
      },
      "name": "projects/fakeproject"
    }
  ],
  "/google.monitoring.v3.MetricService/CreateTimeSeries": [
    {
      "name": "projects/fakeproject",
      "timeSeries": [
        {
          "metric": {
            "labels": {
              "float": "123.4",
              "int": "123",
              "string": "string"
            },
            "type": "workload.googleapis.com/myexponentialhistogram"
          },
          "metricKind": "CUMULATIVE",
          "points": [
            {
              "interval": {
                "endTime": "str",
                "startTime": "str"
              },
              "value": {
                "distributionValue": {
                  "bucketCounts": [
                    "0", "1", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0",
                    "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "1", "0", "0",
                    "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "1", "0",
                    "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "1", "0", "0", "0", "0", "0", "0",
                    "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "1", "0", "0", "0", "0", "0",
                    "0", "0", "0", "0", "0", "0", "0", "1", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0",
                    "0", "0", "0", "0", "0", "0", "0", "0", "1", "0", "0", "0", "0", "0", "0", "0", "0", "0",
                    "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0",
                    "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0"
                  ],
                  "bucketOptions": {
                    "exponentialBuckets": {
                      "growthFactor": 1.0218971486541166,
                      "numFiniteBuckets": 160,
                      "scale": 24.67537320652687
                    }
                  },
                  "count": "7",
                  "mean": 128.57142857142858
                }
              }
            }
          ],
          "resource": {
            "labels": {
              "location": "global",
              "namespace": "",
              "node_id": ""
            },
            "type": "generic_node"
          },
          "unit": "{myunit}"
        }
      ]
    }
  ]
}
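
The exponentialBuckets values in this snapshot follow directly from the formulas in the exporter change above. With the seven recorded values (25 through 300) and max_size=160, the SDK appears to settle on OTLP scale 5 with a positive-bucket offset of 148; those two numbers are inferred from the snapshot rather than shown in the diff, but plugging them in reproduces the stored values up to float rounding:

import math

scale, offset = 5, 148  # inferred from the snapshot, not shown in the diff
growth_factor = math.pow(2, math.pow(2, -scale))

print(growth_factor)                     # 1.0218971486541166  -> "growthFactor"
print(math.pow(growth_factor, offset))   # ~24.675373...       -> "scale"
print(sum([100, 50, 200, 25, 300, 75, 150]) / 7)  # 128.57142857142858 -> "mean"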

opentelemetry-exporter-gcp-monitoring/tests/test_cloud_monitoring.py

Lines changed: 27 additions & 0 deletions
@@ -40,6 +40,7 @@
 from opentelemetry.metrics import CallbackOptions, Observation
 from opentelemetry.sdk.metrics.view import (
     ExplicitBucketHistogramAggregation,
+    ExponentialBucketHistogramAggregation,
     View,
 )
 from opentelemetry.sdk.resources import Resource
@@ -140,6 +141,32 @@ def test_histogram_single_bucket(
     assert gcmfake.get_calls() == snapshot_gcmcalls
 
 
+def test_exponential_histogram(
+    gcmfake_meter_provider: GcmFakeMeterProvider,
+    gcmfake: GcmFake,
+    snapshot_gcmcalls,
+) -> None:
+    meter_provider = gcmfake_meter_provider(
+        views=[
+            View(
+                instrument_name="myexponentialhistogram",
+                aggregation=ExponentialBucketHistogramAggregation(
+                    max_size=160, max_scale=20
+                ),
+            )
+        ]
+    )
+    histogram = meter_provider.get_meter(__name__).create_histogram(
+        "myexponentialhistogram", description="foo", unit="{myunit}"
+    )
+
+    for value in [100, 50, 200, 25, 300, 75, 150]:
+        histogram.record(value, LABELS)
+
+    meter_provider.force_flush()
+    assert gcmfake.get_calls() == snapshot_gcmcalls
+
+
 @pytest.mark.parametrize(
     "value", [pytest.param(123, id="int"), pytest.param(45.6, id="float")]
 )
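
The new test also mirrors how an application would opt in: attach a View with ExponentialBucketHistogramAggregation to the instrument and wire the exporter into a standard MeterProvider. A rough end-to-end sketch; the project id "my-project", the instrument name "request_latency", and the recorded labels are placeholders, not taken from this commit:

from opentelemetry.exporter.cloud_monitoring import CloudMonitoringMetricsExporter
from opentelemetry.sdk.metrics import MeterProvider
from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader
from opentelemetry.sdk.metrics.view import ExponentialBucketHistogramAggregation, View

# Route the latency histogram through the exponential aggregation so the
# exporter's new ExponentialHistogramDataPoint path is exercised.
provider = MeterProvider(
    metric_readers=[
        PeriodicExportingMetricReader(
            CloudMonitoringMetricsExporter(project_id="my-project")  # placeholder project
        )
    ],
    views=[
        View(
            instrument_name="request_latency",
            aggregation=ExponentialBucketHistogramAggregation(max_size=160),
        )
    ],
)

latency = provider.get_meter("example").create_histogram(
    "request_latency", unit="ms", description="request latency"
)
latency.record(12.5, {"route": "/users"})
provider.force_flush()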

0 commit comments
