Metrics docs overhaul
LinasKo committed Aug 27, 2024
1 parent 026a243 commit 84bb44d
Showing 6 changed files with 164 additions and 45 deletions.
@@ -2,7 +2,10 @@
comments: true
---

# Metrics
# Legacy Metrics

Starting with `0.23.0`, a new metrics module is being introduced to supervision.
The metrics on this page are part of the legacy evaluation API and will be deprecated in a future release.

<div class="md-typeset">
<h2><a href="#supervision.metrics.detection.ConfusionMatrix">ConfusionMatrix</a></h2>
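The note above points readers at the new `supervision.metrics` module introduced in `0.23.0`. As a rough orientation, here is a minimal sketch of the new stateful `update()`/`compute()` flow, assembled only from the import paths and docstring examples added later in this commit; the `Detections` constructor fields used here are assumptions, not part of this diff.

```python
# Minimal sketch of the new metrics API referenced in the note above.
# The import path and the update()/compute() chain come from the docstrings
# added in this commit; the Detections field values are made up.
import numpy as np
import supervision as sv
from supervision.metrics import MeanAveragePrecision

targets = sv.Detections(
    xyxy=np.array([[0.0, 0.0, 3.0, 3.0]]),
    class_id=np.array([1]),
)
predictions = sv.Detections(
    xyxy=np.array([[0.1, 0.1, 3.0, 3.0]]),
    confidence=np.array([0.9]),
    class_id=np.array([1]),
)

map_metric = MeanAveragePrecision()
map_result = map_metric.update(predictions, targets).compute()
print(map_result.map50_95)
```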
12 changes: 11 additions & 1 deletion docs/metrics/intersection_over_union.md
@@ -3,6 +3,16 @@ comments: true
status: new
---

# Detections
# Intersection over Union

<div class="md-typeset">
<h2><a href="#supervision.metrics.intersection_over_union.IntersectionOverUnion">IntersectionOverUnion</a></h2>
</div>

:::supervision.metrics.intersection_over_union.IntersectionOverUnion

<div class="md-typeset">
<h2><a href="#supervision.metrics.intersection_over_union.IntersectionOverUnionResult">IntersectionOverUnionResult</a></h2>
</div>

:::supervision.metrics.intersection_over_union.IntersectionOverUnionResult
18 changes: 18 additions & 0 deletions docs/metrics/mean_average_precision.md
@@ -0,0 +1,18 @@
---
comments: true
status: new
---

# Mean Average Precision

<div class="md-typeset">
<h2><a href="#supervision.metrics.mean_average_precision.MeanAveragePrecision">MeanAveragePrecision</a></h2>
</div>

:::supervision.metrics.mean_average_precision.MeanAveragePrecision

<div class="md-typeset">
<h2><a href="#supervision.metrics.mean_average_precision.MeanAveragePrecisionResult">MeanAveragePrecisionResult</a></h2>
</div>

:::supervision.metrics.mean_average_precision.MeanAveragePrecisionResult
3 changes: 2 additions & 1 deletion mkdocs.yml
@@ -47,7 +47,6 @@ nav:
- Detection and Segmentation:
- Core: detection/core.md
- Annotators: detection/annotators.md
- Metrics: detection/metrics.md
- Double Detection Filter: detection/double_detection_filter.md
- Utils: detection/utils.md
- Keypoint Detection:
@@ -67,6 +66,8 @@
- Utils: datasets/utils.md
- Metrics:
- IoU: metrics/intersection_over_union.md
- mAP: metrics/mean_average_precision.md
- Legacy Metrics: detection/legacy_metrics.md
- Utils:
- Video: utils/video.md
- Image: utils/image.md
84 changes: 77 additions & 7 deletions supervision/metrics/intersection_over_union.py
@@ -65,15 +65,13 @@ def update(
) -> IntersectionOverUnion:
"""
Add data to the metric, without computing the result.
Should call all update methods of the shared data store.
Args:
data_1 (Union[Detections, List[Detections]]): The first set of data.
data_2 (Union[Detections, List[Detections]]): The second set of data.
Returns:
Metric: The metric object itself. You can get the metric result
by calling the `compute` method.
(IntersectionOverUnion): The updated metric instance.
"""
if self._is_store_shared:
# Should be updated by the parent metric
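The `update()` docstring above describes a stateful add-then-compute flow. The following is a minimal sketch of that flow, assuming only what the docstrings in this commit show (`from supervision.metrics import IntersectionOverUnion`, the `class_agnostic` flag, and chaining `update().compute()`); the box coordinates and class IDs are made up.

```python
import numpy as np
import supervision as sv
from supervision.metrics import IntersectionOverUnion

# Made-up boxes for illustration only.
detections_1 = sv.Detections(xyxy=np.array([[0.0, 0.0, 4.0, 4.0]]), class_id=np.array([0]))
detections_2 = sv.Detections(xyxy=np.array([[1.0, 1.0, 5.0, 5.0]]), class_id=np.array([0]))

# update() only records the data and returns the metric itself,
# so it can be chained with (or called repeatedly before) compute().
iou_metric = IntersectionOverUnion(class_agnostic=False)
iou_result = iou_metric.update(detections_1, detections_2).compute()
print(iou_result)
```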
@@ -103,10 +101,32 @@ def compute(self) -> IntersectionOverUnionResult:
Uses the data set with the `update` method.
Returns:
Dict[int, npt.NDArray[np.float32]]: A dictionary with class IDs as keys.
If no class ID is provided, the key is the value CLASS_ID_NONE. The values
are (N, M) arrays where N is the number of predictions and M is the number
of targets.
IntersectionOverUnionResult: IoU results.
Example:
```python
import supervision as sv
from supervision.metrics import IntersectionOverUnion
detections_1 = sv.Detections(...)
detections_2 = sv.Detections(...)
iou_metric = IntersectionOverUnion(class_agnostic=False)
iou_result = iou_metric.update(detections_1, detections_2).compute()
print(iou_result)
class_id = 2
ious = iou_result[class_id]
class_id = -1 # no class
ious = iou_result[class_id]
for class_id, ious in iou_result:
...
iou_result.plot()
```
"""
ious_by_class = {}
for class_id in self._store.get_classes():
@@ -135,25 +155,75 @@ def compute(self) -> IntersectionOverUnionResult:
@dataclass
class IntersectionOverUnionResult:
ious_by_class: Dict[int, npt.NDArray[np.float32]]
"""The IoU matrices for each class."""

metric_target: MetricTarget
"""
Defines the type of data used for the metric - boxes, masks or
oriented bounding boxes.
"""

@property
def class_ids(self) -> List[int]:
return list(self.ious_by_class.keys())

def __getitem__(self, class_id: int) -> npt.NDArray[np.float32]:
"""
Get the IoU matrix for a specific class.
Args:
class_id (int): The class ID. Set `-1` to access "no class" data.
If class-agnostic IoU was used, all class IDs will be `-1`.
Returns:
(npt.NDArray[np.float32]): The IoU matrix for the class.
Example:
```python
class_id = 2
ious = iou_result[class_id]
```
"""
return self.ious_by_class[class_id]

def __iter__(self):
"""
Iterate over the IoU matrices for each class.
Returns:
(Iterator[Tuple[int, npt.NDArray[np.float32]]]): An iterator
with class IDs as keys and IoU matrices as values.
Example:
```python
for class_id, ious in iou_result:
...
```
"""
return iter(self.ious_by_class.items())

def __str__(self) -> str:
"""
Format the IoU results as a pretty string.
Example:
```python
print(iou_result)
```
"""
out_str = f"{self.__class__.__name__}:\n"
for class_id, iou in self.ious_by_class.items():
out_str += f"IoUs for class {class_id}:\n{str(iou)}\n"
return out_str

def to_pandas(self) -> Dict[int, "pd.DataFrame"]:
"""
Convert the results to multiple pandas DataFrames.
Returns:
(Dict[int, pd.DataFrame]): A dictionary with class IDs as keys and pandas
DataFrames as values.
"""
ensure_pandas_installed()
import pandas as pd

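As documented above, `IntersectionOverUnionResult.to_pandas()` returns a dictionary of DataFrames keyed by class ID. A hypothetical follow-up to the `compute()` example earlier in this file; the column layout of the DataFrames is not shown in the diff, so only the dictionary keys are relied on here, and pandas must be installed.

```python
# Hypothetical: `iou_result` comes from the compute() example above.
frames = iou_result.to_pandas()
for class_id, df in frames.items():
    print(f"class {class_id}: IoU table of shape {df.shape}")
```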
87 changes: 52 additions & 35 deletions supervision/metrics/mean_average_precision.py
@@ -52,6 +52,16 @@ def update(
predictions: Union[Detections, List[Detections]],
targets: Union[Detections, List[Detections]],
) -> MeanAveragePrecision:
"""
Add new predictions and targets to the metric, but do not compute the result.
Args:
predictions (Union[Detections, List[Detections]]): The predicted detections.
targets (Union[Detections, List[Detections]]): The ground-truth detections.
Returns:
(MeanAveragePrecision): The updated metric instance.
"""
if not isinstance(predictions, list):
predictions = [predictions]
if not isinstance(targets, list):
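Per the signature above, `update()` accepts either a single `Detections` object or a list with one entry per image, and the body shown wraps a lone object into a list. A sketch of the list form follows; `predictions_per_image` and `targets_per_image` are hypothetical placeholders, not data from this diff.

```python
# Hypothetical multi-image update: one Detections object per image.
map_metric = MeanAveragePrecision()
map_metric.update(
    predictions=predictions_per_image,  # List[sv.Detections] (placeholder)
    targets=targets_per_image,          # List[sv.Detections] (placeholder)
)
map_result = map_metric.compute()
print(map_result.map50, map_result.map75)
```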
@@ -88,45 +98,22 @@ def compute(
number of ground-truth objects. Each row is expected to be in
`(x_min, y_min, x_max, y_max, class)` format.
Returns:
MeanAveragePrecision: New instance of MeanAveragePrecision.
(MeanAveragePrecision): New instance of MeanAveragePrecision.
Example:
```python
import supervision as sv
import numpy as np
targets = (
[
np.array(
[
[0.0, 0.0, 3.0, 3.0, 1],
[2.0, 2.0, 5.0, 5.0, 1],
[6.0, 1.0, 8.0, 3.0, 2],
]
),
np.array([[1.0, 1.0, 2.0, 2.0, 2]]),
]
)
from supervision.metrics import MeanAveragePrecision
predictions = [
np.array(
[
[0.0, 0.0, 3.0, 3.0, 1, 0.9],
[0.1, 0.1, 3.0, 3.0, 0, 0.9],
[6.0, 1.0, 8.0, 3.0, 1, 0.8],
[1.0, 6.0, 2.0, 7.0, 1, 0.8],
]
),
np.array([[1.0, 1.0, 2.0, 2.0, 2, 0.8]])
]
predictions = sv.Detections(...)
targets = sv.Detections(...)
mean_average_precison = sv.MeanAveragePrecision.from_tensors(
predictions=predictions,
targets=targets,
)
map_metric = MeanAveragePrecision()
map_result = map_metric.update(predictions, targets).compute()
print(mean_average_precison.map50_95)
# 0.6649
print(map_result)
print(map_result.map50_95)
map_result.plot()
```
"""
(
@@ -243,6 +230,7 @@ def _compute(
map50=map50,
map75=map75,
per_class_ap50_95=average_precisions,
metric_target=self._metric_target,
)

@staticmethod
@@ -256,7 +244,7 @@ def compute_average_precision(recall: np.ndarray, precision: np.ndarray) -> float:
precision (np.ndarray): The precision curve.
Returns:
float: Average precision.
(float): Average precision.
"""
extended_recall = np.concatenate(([0.0], recall, [1.0]))
extended_precision = np.concatenate(([1.0], precision, [0.0]))
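The two lines above pad the recall and precision curves with sentinel endpoints; the diff cuts off before the integration step. A standalone sketch of how such an all-point-interpolation AP calculation is typically completed (an assumption for illustration, not the code from this commit):

```python
import numpy as np

def average_precision_sketch(recall: np.ndarray, precision: np.ndarray) -> float:
    # Pad with sentinel endpoints, as in the lines shown above.
    extended_recall = np.concatenate(([0.0], recall, [1.0]))
    extended_precision = np.concatenate(([1.0], precision, [0.0]))
    # Make precision monotonically non-increasing (the precision envelope).
    envelope = np.flip(np.maximum.accumulate(np.flip(extended_precision)))
    # Integrate precision over recall.
    return float(np.trapz(envelope, extended_recall))

print(average_precision_sketch(np.array([0.5, 1.0]), np.array([1.0, 0.5])))
```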
@@ -320,7 +308,7 @@ def _average_precisions_per_class(
eps (float, optional): Small value to prevent division by zero.
Returns:
np.ndarray: Average precision for different IoU levels.
(np.ndarray): Average precision for different IoU levels.
"""
eps = 1e-16

@@ -361,15 +349,44 @@ def _average_precisions_per_class(
@dataclass
class MeanAveragePrecisionResult:
iou_thresholds: np.ndarray
"""Array of IoU thresholds used in the calculations"""
map50_95: float
"""Mean Average Precision over IoU thresholds from 0.5 to 0.95"""

map50: float
"""Mean Average Precision at IoU threshold of 0.5"""

map75: float
"""Mean Average Precision at IoU threshold of 0.75"""

per_class_ap50_95: np.ndarray
"""Average precision for each class at different IoU thresholds"""

metric_target: MetricTarget
"""
Defines the type of data used for the metric - boxes, masks or
oriented bounding boxes.
"""

small_objects: Optional[MeanAveragePrecisionResult] = None
"""Mean Average Precision results for small objects"""

medium_objects: Optional[MeanAveragePrecisionResult] = None
"""Mean Average Precision results for medium objects"""

large_objects: Optional[MeanAveragePrecisionResult] = None
"""Mean Average Precision results for large objects"""

def __str__(self) -> str:
"""
Format the mAP results as a pretty string.
Example:
```python
print(map_result)
```
"""

out_str = (
f"{self.__class__.__name__}:\n"
f"iou_thresholds: {self.iou_thresholds}\n"
Expand Down Expand Up @@ -402,7 +419,7 @@ def to_pandas(self) -> "pd.DataFrame":
Convert the result to a pandas DataFrame.
Returns:
pd.DataFrame: The result as a DataFrame.
(pd.DataFrame): The result as a DataFrame.
"""
ensure_pandas_installed()
import pandas as pd
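`MeanAveragePrecisionResult` documents scalar fields (`map50_95`, `map50`, `map75`), optional per-size sub-results, and a `to_pandas()` conversion. A hypothetical consumption sketch, assuming a `map_result` computed as in the example earlier in this file and pandas installed:

```python
# Hypothetical: `map_result` comes from the compute() example above.
print(map_result.map50_95, map_result.map50, map_result.map75)

# Size-bucketed sub-results are optional and may be None.
if map_result.small_objects is not None:
    print("small-object mAP 50:95:", map_result.small_objects.map50_95)

# Tabular view of the result.
print(map_result.to_pandas())
```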
