Commit 7eb78fc

fix(pre_commit): 🎨 auto format pre-commit hooks
1 parent cd93a9d commit 7eb78fc

2 files changed: 6 additions and 13 deletions

supervision/metrics/mean_average_precision.py

Lines changed: 6 additions & 12 deletions
@@ -331,9 +331,7 @@ def get_annotation_ids(
             anns
             if not area_range
             else [
-                ann
-                for ann in anns
-                if area_range[0] < ann["area"] < area_range[1]
+                ann for ann in anns if area_range[0] < ann["area"] < area_range[1]
             ]
         )

@@ -385,9 +383,7 @@ def get_category_ids(
 
         # Filter by id
         cats = (
-            cats
-            if not cat_ids
-            else [cat for cat in cats if cat["id"] in cat_ids]
+            cats if not cat_ids else [cat for cat in cats if cat["id"] in cat_ids]
         )
         ids = [cat["id"] for cat in cats]
         return ids
@@ -670,7 +666,7 @@ def _evaluate_image(
         img_id: int,
         cat_id: int,
         area_range: list[int] | tuple[int, int],
-        max_det: int
+        max_det: int,
     ) -> dict[str, Any] | None:
         """
         Perform evaluation for single category and image.
@@ -1289,9 +1285,7 @@ def _prepare_targets(self, targets: list[Detections]) -> dict[str, list[dict]]:
         """Transform targets into a dictionary that can be used by the COCO evaluator"""
         images = [{"id": img_id} for img_id in range(len(targets))]
         if self._image_indices is not None:
-            images = [
-                {"id": self._image_indices[img.get("id")]} for img in images
-            ]
+            images = [{"id": self._image_indices[img.get("id")]} for img in images]
         # Annotations list
         annotations = []
         for image_id, image_targets in enumerate(targets):
@@ -1334,7 +1328,7 @@ def _prepare_targets(self, targets: list[Detections]) -> dict[str, list[dict]]:
                 "bbox": xywh,
                 "category_id": category_id,
                 "id": len(annotations) + 1,  # Start IDs from 1 (0 means no match)
-                "ignore": 0
+                "ignore": 0,
             }
             annotations.append(dict_annotation)
         # Category list
@@ -1391,7 +1385,7 @@ def _prepare_predictions(self, predictions: list[Detections]) -> list[dict]:
                 "score": score,
                 "category_id": category_id,
                 "area": area,
-                "id": len(coco_predictions) + 1
+                "id": len(coco_predictions) + 1,
             }
             coco_predictions.append(dict_prediction)
         return coco_predictions
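All of the edits above are formatting-only: short comprehensions are collapsed onto a single line and trailing commas are added to multi-line signatures and dict literals, the kind of change an auto-formatter run through pre-commit hooks (such as Black or Ruff, whichever hook the repository configures) typically applies. As a standalone sketch of the logic touched by the first hunk, using hypothetical annotation dicts rather than the library's own objects:

    # Standalone sketch, not the library's code: same filter as the collapsed
    # comprehension in the first hunk, applied to made-up COCO-style dicts.
    anns = [{"id": 1, "area": 50.0}, {"id": 2, "area": 5000.0}]  # hypothetical data
    area_range = (0.0, 1024.0)  # keep annotations with 0 < area < 1024
    filtered = (
        anns
        if not area_range
        else [ann for ann in anns if area_range[0] < ann["area"] < area_range[1]]
    )
    print(filtered)  # [{'id': 1, 'area': 50.0}]

An empty or None area_range leaves the list untouched, which is why the conditional expression checks `if not area_range` before filtering.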

supervision/metrics/mean_average_recall.py

Lines changed: 0 additions & 1 deletion
@@ -672,4 +672,3 @@ def _filter_predictions_and_targets_by_size(
             self._filter_detections_by_size(targets, size_category)
         )
         return new_predictions_list, new_targets_list
-

0 commit comments

Comments
 (0)