Commit dc86036

Lint

1 parent d09feba commit dc86036

2 files changed (+140 lines, -67 lines)

exporter/opentelemetry-exporter-otlp-proto-http/src/opentelemetry/exporter/otlp/proto/http/metric_exporter/__init__.py

Lines changed: 67 additions & 40 deletions
@@ -22,6 +22,7 @@
     Any,
     Callable,
     Dict,
+    Iterable,
     List,
     Mapping,
     Sequence,
@@ -262,11 +263,13 @@ def export(
                         resp.text,
                     )
                     return MetricExportResult.FAILURE
-
+
             # Else, attempt export in batches for this retry
             else:
                 export_result = MetricExportResult.SUCCESS
-                for split_metrics_data in self._split_metrics_data(serialized_data):
+                for split_metrics_data in self._split_metrics_data(
+                    serialized_data
+                ):
                     split_resp = self._export(
                         split_metrics_data.SerializeToString()
                     )
@@ -288,17 +291,17 @@ def export(
                             split_resp.text,
                         )
                         export_result = MetricExportResult.FAILURE
-
+
                 # Return result after all batches are attempted
                 return export_result
-
+
         return MetricExportResult.FAILURE

     def _split_metrics_data(
         self,
         metrics_data: pb2.MetricsData,
     ) -> Iterable[pb2.MetricsData]:
-        """Splits metrics data into several MetricsData (copies protobuf originals),
+        """Splits metrics data into several MetricsData (copies protobuf originals),
         based on configured data point max export batch size.

         Args:
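
Note: the export hunks above only re-wrap long lines and strip trailing whitespace; the control flow is unchanged. For orientation, the pattern being reformatted is roughly the following. This is a simplified sketch with hypothetical helper names (post_payload, split_payload), not the exporter's actual API:

# Simplified sketch of the "send whole payload, or split into batches" flow
# re-wrapped above; post_payload() and split_payload() are hypothetical
# stand-ins, not real exporter helpers.
from enum import Enum


class ExportResult(Enum):
    SUCCESS = 0
    FAILURE = 1


def export_once(payload, max_batch_size, post_payload, split_payload):
    if max_batch_size is None:
        # No batch limit configured: send the whole serialized payload at once.
        return ExportResult.SUCCESS if post_payload(payload) else ExportResult.FAILURE

    # Otherwise send each split chunk; one failed chunk marks the export as
    # FAILURE, but the remaining chunks are still attempted.
    result = ExportResult.SUCCESS
    for chunk in split_payload(payload, max_batch_size):
        if not post_payload(chunk):
            result = ExportResult.FAILURE
    return result
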
@@ -350,7 +353,7 @@ def _split_metrics_data
                                     "aggregation_temporality": metric.sum.aggregation_temporality,
                                     "is_monotonic": metric.sum.is_monotonic,
                                     "data_points": split_data_points,
-                                }
+                                },
                             }
                         )
                         current_data_points = metric.sum.data_points
@@ -363,7 +366,7 @@ def _split_metrics_data
                                 "histogram": {
                                     "aggregation_temporality": metric.histogram.aggregation_temporality,
                                     "data_points": split_data_points,
-                                }
+                                },
                             }
                         )
                         current_data_points = metric.histogram.data_points
@@ -376,10 +379,12 @@ def _split_metrics_data
                                 "exponential_histogram": {
                                     "aggregation_temporality": metric.exponential_histogram.aggregation_temporality,
                                     "data_points": split_data_points,
-                                }
+                                },
                             }
                         )
-                        current_data_points = metric.exponential_histogram.data_points
+                        current_data_points = (
+                            metric.exponential_histogram.data_points
+                        )
                     elif metric.HasField("gauge"):
                         split_metrics.append(
                             {
@@ -388,7 +393,7 @@ def _split_metrics_data
                                 "unit": metric.unit,
                                 "gauge": {
                                     "data_points": split_data_points,
-                                }
+                                },
                             }
                         )
                         current_data_points = metric.gauge.data_points
@@ -400,11 +405,13 @@ def _split_metrics_data
                                 "unit": metric.unit,
                                 "summary": {
                                     "data_points": split_data_points,
-                                }
+                                },
                             }
                         )
                     else:
-                        _logger.warning("Tried to split and export an unsupported metric type. Skipping.")
+                        _logger.warning(
+                            "Tried to split and export an unsupported metric type. Skipping."
+                        )
                         continue

                 for data_point in current_data_points:
@@ -413,7 +420,9 @@ def _split_metrics_data(

                     if batch_size >= self._max_export_batch_size:
                         yield pb2.MetricsData(
-                            resource_metrics=self._get_split_resource_metrics_pb2(split_resource_metrics)
+                            resource_metrics=self._get_split_resource_metrics_pb2(
+                                split_resource_metrics
+                            )
                         )

                         # Reset all the reference variables with current metrics_data position
@@ -432,7 +441,7 @@ def _split_metrics_data(
                                         "aggregation_temporality": metric.sum.aggregation_temporality,
                                         "is_monotonic": metric.sum.is_monotonic,
                                         "data_points": split_data_points,
-                                        }
+                                        },
                                 }
                             ]
                         elif metric.HasField("histogram"):
@@ -444,7 +453,7 @@ def _split_metrics_data(
                                     "histogram": {
                                         "aggregation_temporality": metric.histogram.aggregation_temporality,
                                         "data_points": split_data_points,
-                                    }
+                                    },
                                 }
                             ]
                         elif metric.HasField("exponential_histogram"):
@@ -456,7 +465,7 @@ def _split_metrics_data(
                                     "exponential_histogram": {
                                         "aggregation_temporality": metric.exponential_histogram.aggregation_temporality,
                                         "data_points": split_data_points,
-                                    }
+                                    },
                                 }
                             ]
                         elif metric.HasField("gauge"):
@@ -467,7 +476,7 @@ def _split_metrics_data(
                                     "unit": metric.unit,
                                     "gauge": {
                                         "data_points": split_data_points,
-                                    }
+                                    },
                                 }
                             ]
                         elif metric.HasField("summary"):
@@ -478,7 +487,7 @@ def _split_metrics_data(
                                     "unit": metric.unit,
                                     "summary": {
                                         "data_points": split_data_points,
-                                    }
+                                    },
                                 }
                             ]

@@ -511,7 +520,9 @@ def _split_metrics_data(

         if batch_size > 0:
             yield pb2.MetricsData(
-                resource_metrics=self._get_split_resource_metrics_pb2(split_resource_metrics)
+                resource_metrics=self._get_split_resource_metrics_pb2(
+                    split_resource_metrics
+                )
             )

     def _get_split_resource_metrics_pb2(
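
The hunks above are again formatting-only. As context, _split_metrics_data accumulates data points per metric until the configured _max_export_batch_size is reached, yields a pb2.MetricsData built from the accumulated slice, resets its accumulators, and finally yields whatever partial batch remains. The chunking idea in isolation looks roughly like this (a generic sketch, not the exporter's code):

# Generic illustration of the yield-and-reset batching used above: chunk a
# flat sequence by a maximum batch size, emitting the trailing partial batch.
from typing import Iterable, Iterator, List, TypeVar

T = TypeVar("T")


def chunk(items: Iterable[T], max_batch_size: int) -> Iterator[List[T]]:
    batch: List[T] = []
    for item in items:
        batch.append(item)
        if len(batch) >= max_batch_size:
            yield batch
            batch = []  # reset, like the "Reset all the reference variables" step
    if batch:  # mirrors the final `if batch_size > 0:` yield
        yield batch
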
@@ -568,16 +579,16 @@ def _get_split_resource_metrics_pb2(
         split_resource_metrics_pb = []
         for resource_metrics in split_resource_metrics:
             new_resource_metrics = pb2.ResourceMetrics(
-                    resource=resource_metrics.get("resource"),
-                    scope_metrics=[],
-                    schema_url=resource_metrics.get("schema_url"),
-                )
+                resource=resource_metrics.get("resource"),
+                scope_metrics=[],
+                schema_url=resource_metrics.get("schema_url"),
+            )
             for scope_metrics in resource_metrics.get("scope_metrics", []):
                 new_scope_metrics = pb2.ScopeMetrics(
-                        scope=scope_metrics.get("scope"),
-                        metrics=[],
-                        schema_url=scope_metrics.get("schema_url"),
-                    )
+                    scope=scope_metrics.get("scope"),
+                    metrics=[],
+                    schema_url=scope_metrics.get("schema_url"),
+                )

                 for metric in scope_metrics.get("metrics", []):
                     new_metric = None
@@ -590,9 +601,13 @@ def _get_split_resource_metrics_pb2(
                             unit=metric.get("unit"),
                             sum=pb2.Sum(
                                 data_points=[],
-                                aggregation_temporality=metric.get("sum").get("aggregation_temporality"),
-                                is_monotonic=metric.get("sum").get("is_monotonic"),
-                            )
+                                aggregation_temporality=metric.get("sum").get(
+                                    "aggregation_temporality"
+                                ),
+                                is_monotonic=metric.get("sum").get(
+                                    "is_monotonic"
+                                ),
+                            ),
                         )
                         data_points = metric.get("sum").get("data_points")
                     elif "histogram" in metric:
@@ -602,29 +617,37 @@ def _get_split_resource_metrics_pb2(
                             unit=metric.get("unit"),
                             histogram=pb2.Histogram(
                                 data_points=[],
-                                aggregation_temporality=metric.get("histogram").get("aggregation_temporality"),
+                                aggregation_temporality=metric.get(
+                                    "histogram"
+                                ).get("aggregation_temporality"),
                             ),
                         )
-                        data_points = metric.get("histogram").get("data_points")
+                        data_points = metric.get("histogram").get(
+                            "data_points"
+                        )
                     elif "exponential_histogram" in metric:
                         new_metric = pb2.Metric(
                             name=metric.get("name"),
                             description=metric.get("description"),
                             unit=metric.get("unit"),
                             exponential_histogram=pb2.ExponentialHistogram(
                                 data_points=[],
-                                aggregation_temporality=metric.get("exponential_histogram").get("aggregation_temporality"),
+                                aggregation_temporality=metric.get(
+                                    "exponential_histogram"
+                                ).get("aggregation_temporality"),
                             ),
                         )
-                        data_points = metric.get("exponential_histogram").get("data_points")
+                        data_points = metric.get("exponential_histogram").get(
+                            "data_points"
+                        )
                     elif "gauge" in metric:
                         new_metric = pb2.Metric(
                             name=metric.get("name"),
                             description=metric.get("description"),
                             unit=metric.get("unit"),
                             gauge=pb2.Gauge(
                                 data_points=[],
-                            )
+                            ),
                         )
                         data_points = metric.get("gauge").get("data_points")
                     elif "summary" in metric:
@@ -634,11 +657,13 @@ def _get_split_resource_metrics_pb2(
                             unit=metric.get("unit"),
                             summary=pb2.Summary(
                                 data_points=[],
-                            )
+                            ),
                         )
                         data_points = metric.get("summary").get("data_points")
                     else:
-                        _logger.warning("Tried to split and export an unsupported metric type. Skipping.")
+                        _logger.warning(
+                            "Tried to split and export an unsupported metric type. Skipping."
+                        )
                         continue

                     for data_point in data_points:
@@ -647,15 +672,17 @@ def _get_split_resource_metrics_pb2(
                         elif "histogram" in metric:
                             new_metric.histogram.data_points.append(data_point)
                         elif "exponential_histogram" in metric:
-                            new_metric.exponential_histogram.data_points.append(data_point)
+                            new_metric.exponential_histogram.data_points.append(
+                                data_point
+                            )
                         elif "gauge" in metric:
                             new_metric.gauge.data_points.append(data_point)
                         elif "summary" in metric:
                             new_metric.summary.data_points.append(data_point)
-
+
                     new_scope_metrics.metrics.append(new_metric)
                 new_resource_metrics.scope_metrics.append(new_scope_metrics)
-            split_resource_metrics_pb.append(new_resource_metrics)
+            split_resource_metrics_pb.append(new_resource_metrics)
         return split_resource_metrics_pb

     def shutdown(self, timeout_millis: float = 30_000, **kwargs) -> None:
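
The hunks above re-wrap _get_split_resource_metrics_pb2, which turns the intermediate dicts produced during splitting back into pb2.ResourceMetrics / pb2.ScopeMetrics / pb2.Metric messages and then appends the collected data points into the matching field. A rough sketch of that rebuild step for the sum case, assuming the standard OTLP protobuf bindings are importable as shown (illustrative only, not the module's code):

# Illustrative rebuild of a pb2.Metric for the "sum" case from one of the
# intermediate dicts; assumes the standard OTLP protobuf bindings.
from opentelemetry.proto.metrics.v1 import metrics_pb2 as pb2


def rebuild_sum_metric(metric: dict) -> pb2.Metric:
    new_metric = pb2.Metric(
        name=metric.get("name"),
        description=metric.get("description"),
        unit=metric.get("unit"),
        sum=pb2.Sum(
            data_points=[],
            aggregation_temporality=metric.get("sum").get("aggregation_temporality"),
            is_monotonic=metric.get("sum").get("is_monotonic"),
        ),
    )
    # Repeated protobuf fields cannot be assigned directly, so data points
    # are appended one by one, as the diff above does.
    for data_point in metric.get("sum").get("data_points"):
        new_metric.sum.data_points.append(data_point)
    return new_metric
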

0 commit comments
