
Commit bad614b

Fix pb2.* repeated field assignments
1 parent 7fa9679 commit bad614b
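
Why the change is needed (background, not part of the commit): in generated protobuf Python code a repeated field cannot be reassigned, and passing a Python list to a message constructor copies the list's current contents into the field. Appending to the original list afterwards therefore never reaches the message, which is what the old code relied on. A minimal sketch of that behavior, assuming the metrics pb2 module this exporter imports as `pb2`:

    # Illustration only; the import path is assumed from the exporter's imports.
    from opentelemetry.proto.metrics.v1 import metrics_pb2 as pb2

    points = []                                    # plain Python list
    sum_msg = pb2.Sum(data_points=points)          # current list contents are copied in
    points.append(pb2.NumberDataPoint(as_int=1))   # mutate the original list afterwards
    assert len(sum_msg.data_points) == 0           # the message is unaffected

    sum_msg.data_points.append(pb2.NumberDataPoint(as_int=1))
    assert len(sum_msg.data_points) == 1           # appending to the repeated field itself works

    # sum_msg.data_points = []                     # direct reassignment raises AttributeError

Hence the diff below builds each ResourceMetrics/ScopeMetrics/Metric message first and appends data points through the message's own repeated field.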

1 file changed: 74 additions, 52 deletions

exporter/opentelemetry-exporter-otlp-proto-http/src/opentelemetry/exporter/otlp/proto/http/metric_exporter/__init__.py

@@ -308,42 +308,43 @@ def _split_metrics_data(
 
         for resource_metrics in metrics_data.resource_metrics:
             split_scope_metrics: List[pb2.ScopeMetrics] = []
-            split_resource_metrics.append(
-                pb2.ResourceMetrics(
+            new_resource_metrics = pb2.ResourceMetrics(
                 resource=resource_metrics.resource,
                 scope_metrics=split_scope_metrics,
                 schema_url=resource_metrics.schema_url,
             )
-            )
+            split_resource_metrics.append(new_resource_metrics)
+
             for scope_metrics in resource_metrics.scope_metrics:
                 split_metrics: List[pb2.Metric] = []
-                split_scope_metrics.append(
-                    pb2.ScopeMetrics(
+                new_scope_metrics = pb2.ScopeMetrics(
                     scope=scope_metrics.scope,
                     metrics=split_metrics,
                     schema_url=scope_metrics.schema_url,
                 )
-                )
+                split_scope_metrics.append(new_scope_metrics)
+
                 for metric in scope_metrics.metrics:
                     # protobuf requires specific metrics types (e.g. Sum, Histogram)
                     # without definition of DataPointT like gRPC
 
                     if metric.HasField("sum"):
                         split_data_points = []
-                        split_metrics.append(
-                            pb2.Metric(
-                                name=metric.name,
-                                description=metric.description,
-                                unit=metric.unit,
-                                sum=pb2.Sum(
-                                    data_points=split_data_points,
-                                    aggregation_temporality=metric.sum.aggregation_temporality,
-                                    is_monotonic=metric.sum.is_monotonic
-                                )
+                        new_metric = pb2.Metric(
+                            name=metric.name,
+                            description=metric.description,
+                            unit=metric.unit,
+                            sum=pb2.Sum(
+                                data_points=[],
+                                aggregation_temporality=metric.sum.aggregation_temporality,
+                                is_monotonic=metric.sum.is_monotonic
                             )
                         )
+                        split_metrics.append(new_metric)
+
                         for data_point in metric.sum.data_points:
                             split_data_points.append(data_point)
+                            new_metric.sum.data_points.append(data_point)
                             batch_size += 1
 
                             if batch_size >= self._max_export_batch_size:
@@ -380,21 +381,24 @@ def _split_metrics_data(
                                     )
                                 ]
 
+                        # Update scope metrics after all data_points added to metric
+                        new_scope_metrics.metrics.append(new_metric)
+
                     elif metric.HasField("histogram"):
                         split_data_points = []
-                        split_metrics.append(
-                            pb2.Metric(
-                                name=metric.name,
-                                description=metric.description,
-                                unit=metric.unit,
-                                histogram=pb2.Histogram(
-                                    data_points=split_data_points,
-                                    aggregation_temporality=metric.histogram.aggregation_temporality,
-                                ),
-                            )
+                        new_metric = pb2.Metric(
+                            name=metric.name,
+                            description=metric.description,
+                            unit=metric.unit,
+                            histogram=pb2.Histogram(
+                                data_points=split_data_points,
+                                aggregation_temporality=metric.histogram.aggregation_temporality,
+                            ),
                         )
+                        split_metrics.append(new_metric)
                         for data_point in metric.histogram.data_points:
                             split_data_points.append(data_point)
+                            new_metric.histogram.data_points.append(data_point)
                             batch_size += 1
 
                             if batch_size >= self._max_export_batch_size:
@@ -430,21 +434,25 @@ def _split_metrics_data(
                                     )
                                 ]
 
+                        # Update scope metrics after all data_points added to metric
+                        new_scope_metrics.metrics.append(new_metric)
+
                     elif metric.HasField("exponential_histogram"):
                         split_data_points = []
-                        split_metrics.append(
-                            pb2.Metric(
-                                name=metric.name,
-                                description=metric.description,
-                                unit=metric.unit,
-                                exponential_histogram=pb2.ExponentialHistogram(
-                                    data_points=split_data_points,
-                                    aggregation_temporality=metric.exponential_histogram.aggregation_temporality,
-                                ),
-                            )
+                        new_metric = pb2.Metric(
+                            name=metric.name,
+                            description=metric.description,
+                            unit=metric.unit,
+                            exponential_histogram=pb2.ExponentialHistogram(
+                                data_points=split_data_points,
+                                aggregation_temporality=metric.exponential_histogram.aggregation_temporality,
+                            ),
                         )
+                        split_metrics.append(new_metric)
+
                         for data_point in metric.exponential_histogram.data_points:
                             split_data_points.append(data_point)
+                            new_metric.exponential_histogram.data_points.append(data_point)
                             batch_size += 1
 
                             if batch_size >= self._max_export_batch_size:
@@ -480,20 +488,24 @@ def _split_metrics_data(
                                     )
                                 ]
 
+                        # Update scope metrics after all data_points added to metric
+                        new_scope_metrics.metrics.append(new_metric)
+
                     elif metric.HasField("gauge"):
                         split_data_points = []
-                        split_metrics.append(
-                            pb2.Metric(
-                                name=metric.name,
-                                description=metric.description,
-                                unit=metric.unit,
-                                gauge=pb2.Gauge(
-                                    data_points=split_data_points,
-                                )
+                        new_metric = pb2.Metric(
+                            name=metric.name,
+                            description=metric.description,
+                            unit=metric.unit,
+                            gauge=pb2.Gauge(
+                                data_points=split_data_points,
                             )
                         )
+                        split_metrics.append(new_metric)
+
                         for data_point in metric.gauge.data_points:
                             split_data_points.append(data_point)
+                            new_metric.gauge.data_points.append(data_point)
                             batch_size += 1
 
                             if batch_size >= self._max_export_batch_size:
@@ -528,20 +540,24 @@ def _split_metrics_data(
                                     )
                                 ]
 
+                        # Update scope metrics after all data_points added to metric
+                        new_scope_metrics.metrics.append(new_metric)
+
                     elif metric.HasField("summary"):
                         split_data_points = []
-                        split_metrics.append(
-                            pb2.Metric(
-                                name=metric.name,
-                                description=metric.description,
-                                unit=metric.unit,
-                                summary=pb2.Summary(
-                                    data_points=split_data_points,
-                                )
+                        new_metric = pb2.Metric(
+                            name=metric.name,
+                            description=metric.description,
+                            unit=metric.unit,
+                            summary=pb2.Summary(
+                                data_points=split_data_points,
                             )
                         )
+                        split_metrics.append(new_metric)
+
                         for data_point in metric.summary.data_points:
                             split_data_points.append(data_point)
+                            new_metric.summary.data_points.append(data_point)
                             batch_size += 1
 
                             if batch_size >= self._max_export_batch_size:
@@ -576,13 +592,19 @@ def _split_metrics_data(
                                     )
                                 ]
 
+                        # Update scope metrics after all data_points added to metric
+                        new_scope_metrics.metrics.append(new_metric)
+
                     else:
                         _logger.warning("Tried to split and export an unsupported metric type.")
 
                     if not split_data_points:
                         # If data_points is empty remove the whole metric
                         split_metrics.pop()
 
+                # Update resource_metrics after all scope_metrics updated with metrics, data_points
+                new_resource_metrics.scope_metrics.append(new_scope_metrics)
+
                 if not split_metrics:
                     # If metrics is empty remove the whole scope_metrics
                     split_scope_metrics.pop()
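
The ordering of the new appends follows from the same semantics: appending a message to a repeated composite field also stores a copy, so the commit attaches `new_metric` to `new_scope_metrics.metrics` (and `new_scope_metrics` to `new_resource_metrics.scope_metrics`) only after all data points have been added. A small illustrative sketch, again using the assumed `pb2` import from above:

    from opentelemetry.proto.metrics.v1 import metrics_pb2 as pb2

    scope = pb2.ScopeMetrics()
    metric = pb2.Metric(name="example")

    scope.metrics.append(metric)           # a copy of `metric` is stored
    metric.sum.data_points.add(as_int=1)   # later mutation of the original

    assert len(scope.metrics[0].sum.data_points) == 0   # the stored copy is unchanged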
