Skip to content

Commit fb5307c

Browse files
committed
Add support for exponential histograms
1 parent 4c02974 commit fb5307c

File tree

3 files changed

+325
-16
lines changed

3 files changed

+325
-16
lines changed

opentelemetry-exporter-gcp-monitoring/src/opentelemetry/exporter/cloud_monitoring/__init__.py

Lines changed: 60 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
# limitations under the License.
1414

1515
import logging
16+
import math
1617
import random
1718
from dataclasses import replace
1819
from time import time_ns
@@ -136,9 +137,7 @@ def __init__(
136137
self._metric_descriptors: Dict[str, MetricDescriptor] = {}
137138
self.unique_identifier = None
138139
if add_unique_identifier:
139-
self.unique_identifier = "{:08x}".format(
140-
random.randint(0, 16**8)
141-
)
140+
self.unique_identifier = "{:08x}".format(random.randint(0, 16**8))
142141

143142
(
144143
self._exporter_start_time_seconds,
@@ -212,11 +211,7 @@ def _get_metric_descriptor(
212211
elif isinstance(data, Histogram):
213212
descriptor.metric_kind = MetricDescriptor.MetricKind.CUMULATIVE
214213
elif isinstance(data, ExponentialHistogram):
215-
logger.warning(
216-
"Unsupported metric data type %s, ignoring it",
217-
type(data).__name__,
218-
)
219-
return None
214+
descriptor.metric_kind = MetricDescriptor.MetricKind.CUMULATIVE
220215
else:
221216
# Exhaustive check
222217
_: NoReturn = data
@@ -235,6 +230,8 @@ def _get_metric_descriptor(
235230
)
236231
elif isinstance(first_point, HistogramDataPoint):
237232
descriptor.value_type = MetricDescriptor.ValueType.DISTRIBUTION
233+
elif isinstance(first_point, ExponentialHistogramDataPoint):
234+
descriptor.value_type = MetricDescriptor.ValueType.DISTRIBUTION
238235
elif first_point is None:
239236
pass
240237
else:
@@ -265,7 +262,9 @@ def _get_metric_descriptor(
265262
@staticmethod
266263
def _to_point(
267264
kind: "MetricDescriptor.MetricKind.V",
268-
data_point: Union[NumberDataPoint, HistogramDataPoint],
265+
data_point: Union[
266+
NumberDataPoint, HistogramDataPoint, ExponentialHistogramDataPoint
267+
],
269268
) -> Point:
270269
if isinstance(data_point, HistogramDataPoint):
271270
mean = (
@@ -283,6 +282,55 @@ def _to_point(
283282
),
284283
)
285284
)
285+
elif isinstance(data_point, ExponentialHistogramDataPoint):
286+
# Adapted from https://github.com/GoogleCloudPlatform/opentelemetry-operations-go/blob/v1.8.0/exporter/collector/metrics.go#L582
287+
mean = (
288+
data_point.sum / data_point.count if data_point.count else 0.0
289+
)
290+
291+
# Calculate underflow bucket (zero count + negative buckets)
292+
underflow = data_point.zero_count
293+
if data_point.negative.bucket_counts:
294+
underflow += sum(data_point.negative.bucket_counts)
295+
296+
# Create bucket counts array: [underflow, positive_buckets..., overflow=0]
297+
bucket_counts = [underflow]
298+
if data_point.positive.bucket_counts:
299+
bucket_counts.extend(data_point.positive.bucket_counts)
300+
bucket_counts.append(0) # overflow bucket is always empty
301+
302+
# Determine bucket options
303+
if not data_point.positive.bucket_counts:
304+
# If no positive buckets, use explicit buckets with bounds=[0]
305+
bucket_options = Distribution.BucketOptions(
306+
explicit_buckets=Distribution.BucketOptions.Explicit(
307+
bounds=[0.0],
308+
)
309+
)
310+
else:
311+
# Use exponential bucket options
312+
# growth_factor = 2^(2^(-scale))
313+
growth_factor = math.pow(2, math.pow(2, -data_point.scale))
314+
# scale = growth_factor^(positive_bucket_offset)
315+
scale = math.pow(growth_factor, data_point.positive.offset)
316+
num_finite_buckets = len(bucket_counts) - 2
317+
318+
bucket_options = Distribution.BucketOptions(
319+
exponential_buckets=Distribution.BucketOptions.Exponential(
320+
num_finite_buckets=num_finite_buckets,
321+
growth_factor=growth_factor,
322+
scale=scale,
323+
)
324+
)
325+
326+
point_value = TypedValue(
327+
distribution_value=Distribution(
328+
count=data_point.count,
329+
mean=mean,
330+
bucket_counts=bucket_counts,
331+
bucket_options=bucket_options,
332+
)
333+
)
286334
else:
287335
if isinstance(data_point.value, int):
288336
point_value = TypedValue(int64_value=data_point.value)
@@ -350,20 +398,16 @@ def export(
350398
continue
351399

352400
for data_point in metric.data.data_points:
353-
if isinstance(
354-
data_point, ExponentialHistogramDataPoint
355-
):
356-
continue
357401
labels = {
358402
_normalize_label_key(key): str(value)
359403
for key, value in (
360404
data_point.attributes or {}
361405
).items()
362406
}
363407
if self.unique_identifier:
364-
labels[
365-
UNIQUE_IDENTIFIER_KEY
366-
] = self.unique_identifier
408+
labels[UNIQUE_IDENTIFIER_KEY] = (
409+
self.unique_identifier
410+
)
367411
point = self._to_point(
368412
descriptor.metric_kind, data_point
369413
)
Lines changed: 238 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,238 @@
1+
{
2+
"/google.monitoring.v3.MetricService/CreateMetricDescriptor": [
3+
{
4+
"metricDescriptor": {
5+
"description": "foo",
6+
"displayName": "myexponentialhistogram",
7+
"labels": [
8+
{
9+
"key": "string"
10+
},
11+
{
12+
"key": "int"
13+
},
14+
{
15+
"key": "float"
16+
}
17+
],
18+
"metricKind": "CUMULATIVE",
19+
"type": "workload.googleapis.com/myexponentialhistogram",
20+
"unit": "{myunit}",
21+
"valueType": "DISTRIBUTION"
22+
},
23+
"name": "projects/fakeproject"
24+
}
25+
],
26+
"/google.monitoring.v3.MetricService/CreateTimeSeries": [
27+
{
28+
"name": "projects/fakeproject",
29+
"timeSeries": [
30+
{
31+
"metric": {
32+
"labels": {
33+
"float": "123.4",
34+
"int": "123",
35+
"string": "string"
36+
},
37+
"type": "workload.googleapis.com/myexponentialhistogram"
38+
},
39+
"metricKind": "CUMULATIVE",
40+
"points": [
41+
{
42+
"interval": {
43+
"endTime": "str",
44+
"startTime": "str"
45+
},
46+
"value": {
47+
"distributionValue": {
48+
"bucketCounts": [
49+
"0",
50+
"1",
51+
"0",
52+
"0",
53+
"0",
54+
"0",
55+
"0",
56+
"0",
57+
"0",
58+
"0",
59+
"0",
60+
"0",
61+
"0",
62+
"0",
63+
"0",
64+
"0",
65+
"0",
66+
"0",
67+
"0",
68+
"0",
69+
"0",
70+
"0",
71+
"0",
72+
"0",
73+
"0",
74+
"0",
75+
"0",
76+
"0",
77+
"0",
78+
"0",
79+
"0",
80+
"0",
81+
"0",
82+
"1",
83+
"0",
84+
"0",
85+
"0",
86+
"0",
87+
"0",
88+
"0",
89+
"0",
90+
"0",
91+
"0",
92+
"0",
93+
"0",
94+
"0",
95+
"0",
96+
"0",
97+
"0",
98+
"0",
99+
"0",
100+
"0",
101+
"1",
102+
"0",
103+
"0",
104+
"0",
105+
"0",
106+
"0",
107+
"0",
108+
"0",
109+
"0",
110+
"0",
111+
"0",
112+
"0",
113+
"0",
114+
"1",
115+
"0",
116+
"0",
117+
"0",
118+
"0",
119+
"0",
120+
"0",
121+
"0",
122+
"0",
123+
"0",
124+
"0",
125+
"0",
126+
"0",
127+
"0",
128+
"0",
129+
"0",
130+
"0",
131+
"0",
132+
"0",
133+
"1",
134+
"0",
135+
"0",
136+
"0",
137+
"0",
138+
"0",
139+
"0",
140+
"0",
141+
"0",
142+
"0",
143+
"0",
144+
"0",
145+
"0",
146+
"1",
147+
"0",
148+
"0",
149+
"0",
150+
"0",
151+
"0",
152+
"0",
153+
"0",
154+
"0",
155+
"0",
156+
"0",
157+
"0",
158+
"0",
159+
"0",
160+
"0",
161+
"0",
162+
"0",
163+
"0",
164+
"0",
165+
"1",
166+
"0",
167+
"0",
168+
"0",
169+
"0",
170+
"0",
171+
"0",
172+
"0",
173+
"0",
174+
"0",
175+
"0",
176+
"0",
177+
"0",
178+
"0",
179+
"0",
180+
"0",
181+
"0",
182+
"0",
183+
"0",
184+
"0",
185+
"0",
186+
"0",
187+
"0",
188+
"0",
189+
"0",
190+
"0",
191+
"0",
192+
"0",
193+
"0",
194+
"0",
195+
"0",
196+
"0",
197+
"0",
198+
"0",
199+
"0",
200+
"0",
201+
"0",
202+
"0",
203+
"0",
204+
"0",
205+
"0",
206+
"0",
207+
"0",
208+
"0",
209+
"0",
210+
"0"
211+
],
212+
"bucketOptions": {
213+
"exponentialBuckets": {
214+
"growthFactor": 1.0218971486541166,
215+
"numFiniteBuckets": 160,
216+
"scale": 24.67537320652687
217+
}
218+
},
219+
"count": "7",
220+
"mean": 128.57142857142858
221+
}
222+
}
223+
}
224+
],
225+
"resource": {
226+
"labels": {
227+
"location": "global",
228+
"namespace": "",
229+
"node_id": ""
230+
},
231+
"type": "generic_node"
232+
},
233+
"unit": "{myunit}"
234+
}
235+
]
236+
}
237+
]
238+
}

0 commit comments

Comments (0)