
Commit e8fbb08

Browse files
ocelotlaabmass
andauthored
Refactor metric format (#2658)
* Refactor metric format

  Fixes #2646

* Do not overwrite pb2_scope_metrics
* Refactor for loops
* Add multiple scope test case
* Fix interfaces
* Fix docs
* Update exporter/opentelemetry-exporter-prometheus/src/opentelemetry/exporter/prometheus/__init__.py

  Co-authored-by: Aaron Abbott <[email protected]>

* Fix lint
* Remove resource check
* Remove instrumentation_scope check
* Group metrics by instrumentation scopes in the SDK
* Remove label_keyss
* Use strings instead of mocks
* Return generator instead of a list
* Fix lint
* Rename variables

Co-authored-by: Aaron Abbott <[email protected]>
1 parent a821311 commit e8fbb08

File tree

21 files changed: +1430 -1161 lines


exporter/opentelemetry-exporter-otlp-proto-grpc/src/opentelemetry/exporter/otlp/proto/grpc/_metric_exporter/__init__.py

Lines changed: 107 additions & 82 deletions
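The diff below carries the interface change described in the commit message: _translate_data and export no longer receive a flat Sequence[Metric] but a single MetricsData object, which the exporter walks as resource_metrics -> scope_metrics -> metrics -> data_points. The following is a minimal, self-contained sketch of that nested shape and traversal order; the plain dataclasses and the walk helper are illustrative stand-ins (only the attribute names come from the diff), not the SDK's own classes.

from dataclasses import dataclass
from typing import List, Union


# Stand-in containers; the attribute names mirror those read by
# _translate_data in the diff below. The real classes live in the SDK
# (MetricsData is imported from opentelemetry.sdk._metrics.export); these
# dataclasses are only an assumed sketch of their shape.
@dataclass
class NumberDataPoint:
    attributes: dict
    start_time_unix_nano: int
    time_unix_nano: int
    value: Union[int, float]


@dataclass
class Sum:
    data_points: List[NumberDataPoint]
    aggregation_temporality: int
    is_monotonic: bool


@dataclass
class Metric:
    name: str
    description: str
    unit: str
    data: Sum  # Gauge and Histogram data follow the same nesting pattern


@dataclass
class ScopeMetrics:
    scope: object  # instrumentation scope carrying .name and .version
    metrics: List[Metric]


@dataclass
class ResourceMetrics:
    resource: object
    scope_metrics: List[ScopeMetrics]


@dataclass
class MetricsData:
    resource_metrics: List[ResourceMetrics]


def walk(data: MetricsData) -> None:
    # Same four-level traversal order as the refactored _translate_data below.
    for resource_metrics in data.resource_metrics:
        for scope_metrics in resource_metrics.scope_metrics:
            for metric in scope_metrics.metrics:
                for data_point in metric.data.data_points:
                    print(metric.name, data_point.value)

The real _translate_data performs the same four-level walk, building one pb2.ScopeMetrics per instrumentation scope and appending one pb2.Metric per SDK metric.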
@@ -11,7 +11,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import logging
+from logging import getLogger
 from os import environ
 from typing import Optional, Sequence
 from grpc import ChannelCredentials, Compression
@@ -40,9 +40,10 @@
 from opentelemetry.sdk._metrics.export import (
     MetricExporter,
     MetricExportResult,
+    MetricsData,
 )

-logger = logging.getLogger(__name__)
+_logger = getLogger(__name__)


 class OTLPMetricExporter(
@@ -79,103 +80,127 @@ def __init__(
         )

     def _translate_data(
-        self, data: Sequence[Metric]
+        self, data: MetricsData
     ) -> ExportMetricsServiceRequest:
-        sdk_resource_scope_metrics = {}
-
-        for metric in data:
-            resource = metric.resource
-            scope_map = sdk_resource_scope_metrics.get(resource, {})
-            if not scope_map:
-                sdk_resource_scope_metrics[resource] = scope_map
-
-            scope_metrics = scope_map.get(metric.instrumentation_scope)
-
-            if not scope_metrics:
-                if metric.instrumentation_scope is not None:
-                    scope_map[metric.instrumentation_scope] = pb2.ScopeMetrics(
-                        scope=InstrumentationScope(
-                            name=metric.instrumentation_scope.name,
-                            version=metric.instrumentation_scope.version,
-                        )
-                    )
-                else:
-                    scope_map[
-                        metric.instrumentation_scope
-                    ] = pb2.ScopeMetrics()

-            scope_metrics = scope_map.get(metric.instrumentation_scope)
+        resource_metrics_dict = {}

-            pbmetric = pb2.Metric(
-                name=metric.name,
-                description=metric.description,
-                unit=metric.unit,
-            )
-            if isinstance(metric.point, Gauge):
-                pt = pb2.NumberDataPoint(
-                    attributes=self._translate_attributes(metric.attributes),
-                    time_unix_nano=metric.point.time_unix_nano,
-                )
-                if isinstance(metric.point.value, int):
-                    pt.as_int = metric.point.value
-                else:
-                    pt.as_double = metric.point.value
-                pbmetric.gauge.data_points.append(pt)
-            elif isinstance(metric.point, Histogram):
-                pt = pb2.HistogramDataPoint(
-                    attributes=self._translate_attributes(metric.attributes),
-                    time_unix_nano=metric.point.time_unix_nano,
-                    start_time_unix_nano=metric.point.start_time_unix_nano,
-                    count=sum(metric.point.bucket_counts),
-                    sum=metric.point.sum,
-                    bucket_counts=metric.point.bucket_counts,
-                    explicit_bounds=metric.point.explicit_bounds,
-                )
-                pbmetric.histogram.aggregation_temporality = (
-                    metric.point.aggregation_temporality
-                )
-                pbmetric.histogram.data_points.append(pt)
-            elif isinstance(metric.point, Sum):
-                pt = pb2.NumberDataPoint(
-                    attributes=self._translate_attributes(metric.attributes),
-                    start_time_unix_nano=metric.point.start_time_unix_nano,
-                    time_unix_nano=metric.point.time_unix_nano,
-                )
-                if isinstance(metric.point.value, int):
-                    pt.as_int = metric.point.value
-                else:
-                    pt.as_double = metric.point.value
-                # note that because sum is a message type, the fields must be
-                # set individually rather than instantiating a pb2.Sum and setting
-                # it once
-                pbmetric.sum.aggregation_temporality = (
-                    metric.point.aggregation_temporality
+        for resource_metrics in data.resource_metrics:
+
+            resource = resource_metrics.resource
+
+            # It is safe to assume that each entry in data.resource_metrics is
+            # associated with an unique resource.
+            scope_metrics_dict = {}
+
+            resource_metrics_dict[resource] = scope_metrics_dict
+
+            for scope_metrics in resource_metrics.scope_metrics:
+
+                instrumentation_scope = scope_metrics.scope
+
+                # The SDK groups metrics in instrumentation scopes already so
+                # there is no need to check for existing instrumentation scopes
+                # here.
+                pb2_scope_metrics = pb2.ScopeMetrics(
+                    scope=InstrumentationScope(
+                        name=instrumentation_scope.name,
+                        version=instrumentation_scope.version,
+                    )
                 )
-                pbmetric.sum.is_monotonic = metric.point.is_monotonic
-                pbmetric.sum.data_points.append(pt)
-            else:
-                logger.warn("unsupported datapoint type %s", metric.point)
-                continue
-
-            scope_metrics.metrics.append(
-                pbmetric,
-            )
+
+                scope_metrics_dict[instrumentation_scope] = pb2_scope_metrics
+
+                for metric in scope_metrics.metrics:
+                    pb2_metric = pb2.Metric(
+                        name=metric.name,
+                        description=metric.description,
+                        unit=metric.unit,
+                    )
+
+                    if isinstance(metric.data, Gauge):
+                        for data_point in metric.data.data_points:
+                            pt = pb2.NumberDataPoint(
+                                attributes=self._translate_attributes(
+                                    data_point.attributes
+                                ),
+                                time_unix_nano=data_point.time_unix_nano,
+                            )
+                            if isinstance(data_point.value, int):
+                                pt.as_int = data_point.value
+                            else:
+                                pt.as_double = data_point.value
+                            pb2_metric.gauge.data_points.append(pt)
+
+                    elif isinstance(metric.data, Histogram):
+                        for data_point in metric.data.data_points:
+                            pt = pb2.HistogramDataPoint(
+                                attributes=self._translate_attributes(
+                                    data_point.attributes
+                                ),
+                                time_unix_nano=data_point.time_unix_nano,
+                                start_time_unix_nano=(
+                                    data_point.start_time_unix_nano
+                                ),
+                                count=data_point.count,
+                                sum=data_point.sum,
+                                bucket_counts=data_point.bucket_counts,
+                                explicit_bounds=data_point.explicit_bounds,
+                            )
+                            pb2_metric.histogram.aggregation_temporality = (
+                                metric.data.aggregation_temporality
+                            )
+                            pb2_metric.histogram.data_points.append(pt)
+
+                    elif isinstance(metric.data, Sum):
+                        for data_point in metric.data.data_points:
+                            pt = pb2.NumberDataPoint(
+                                attributes=self._translate_attributes(
+                                    data_point.attributes
+                                ),
+                                start_time_unix_nano=(
+                                    data_point.start_time_unix_nano
+                                ),
+                                time_unix_nano=data_point.time_unix_nano,
+                            )
+                            if isinstance(data_point.value, int):
+                                pt.as_int = data_point.value
+                            else:
+                                pt.as_double = data_point.value
+                            # note that because sum is a message type, the
+                            # fields must be set individually rather than
+                            # instantiating a pb2.Sum and setting it once
+                            pb2_metric.sum.aggregation_temporality = (
+                                metric.data.aggregation_temporality
+                            )
+                            pb2_metric.sum.is_monotonic = (
+                                metric.data.is_monotonic
+                            )
+                            pb2_metric.sum.data_points.append(pt)
+                    else:
+                        _logger.warn(
+                            "unsupported datapoint type %s", metric.point
+                        )
+                        continue
+
+                    pb2_scope_metrics.metrics.append(pb2_metric)
+
         return ExportMetricsServiceRequest(
             resource_metrics=get_resource_data(
-                sdk_resource_scope_metrics,
+                resource_metrics_dict,
                 pb2.ResourceMetrics,
                 "metrics",
             )
         )

     def export(
         self,
-        metrics: Sequence[Metric],
+        metrics_data: MetricsData,
         timeout_millis: float = 10_000,
         **kwargs,
     ) -> MetricExportResult:
         # TODO(#2663): OTLPExporterMixin should pass timeout to gRPC
-        return self._export(metrics)
+        return self._export(metrics_data)

     def shutdown(self, timeout_millis: float = 30_000, **kwargs) -> None:
         pass
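The comment kept in the new Sum branch ("because sum is a message type, the fields must be set individually") reflects how protobuf handles embedded messages and oneof fields. The short sketch below illustrates that pattern; it assumes pb2 aliases opentelemetry.proto.metrics.v1.metrics_pb2 (the alias is not shown in the hunk above) and is illustrative only, not part of this commit.

# Hedged sketch: setting a oneof value and an embedded message field by field.
# The pb2 alias below is an assumption; it does not appear in the hunk above.
from opentelemetry.proto.metrics.v1 import metrics_pb2 as pb2

pb2_metric = pb2.Metric(name="example_counter", description="", unit="1")

pt = pb2.NumberDataPoint(start_time_unix_nano=0, time_unix_nano=1_000_000_000)
pt.as_int = 42  # NumberDataPoint.value is a oneof: either as_int or as_double

# Metric.sum is an embedded message, so `pb2_metric.sum = pb2.Sum(...)` would
# raise AttributeError; scalar fields are assigned one by one and data points
# are appended in place, mirroring what _translate_data does above.
pb2_metric.sum.aggregation_temporality = (
    pb2.AGGREGATION_TEMPORALITY_CUMULATIVE
)
pb2_metric.sum.is_monotonic = True
pb2_metric.sum.data_points.append(pt)

print(pb2_metric)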
