Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Remove IOU non-max-suppression and ignore non taxonomy classes from mean calculation #344

Open
wants to merge 3 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .circleci/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ jobs:
# Important: Don't change this otherwise we will stop testing the earliest
# python version we have to support.
- image: python:3.6-buster
resource_class: small
resource_class: medium
parallelism: 6
steps:
- checkout # checkout source code to working directory
Expand Down
8 changes: 7 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,13 @@ All notable changes to the [Nucleus Python Client](https://github.com/scaleapi/n
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [0.14.11](https://github.com/scaleapi/nucleus-python-client/releases/tag/v0.14.12) - 2022-08-05
## [0.14.13](https://github.com/scaleapi/nucleus-python-client/releases/tag/v0.14.13) - 2022-08-10

### Fixed
- Validate Segmentation IOU is no longer thresholded and non-max-suppressed by default.
- Validate Segmentation metrics now ignore out-of-taxonomy indexes.

## [0.14.12](https://github.com/scaleapi/nucleus-python-client/releases/tag/v0.14.12) - 2022-08-05

### Added
- Added auto-paginated `Slice.export_predictions_generator`
Expand Down
46 changes: 25 additions & 21 deletions nucleus/metrics/segmentation_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ def __init__(
prediction_filters: Optional[
Union[ListOfOrAndFilters, ListOfAndFilters]
] = None,
iou_threshold: float = 0.5,
iou_threshold: Optional[float] = None,
):
"""Initializes PolygonMetric abstract object.

Expand All @@ -61,6 +61,8 @@ def __init__(
each describe a single column predicate. The list of inner predicates is interpreted as a conjunction
(AND), forming a more selective `and` multiple field predicate.
Finally, the most outer list combines these filters as a disjunction (OR).
iou_threshold: Threshold to consider detections under IOU to be false positives. None if no
non-max-suppression is supposed to happen.
"""
# TODO -> add custom filtering to Segmentation(Annotation|Prediction).annotations.(metadata|label)
super().__init__(annotation_filters, prediction_filters)
Expand Down Expand Up @@ -135,7 +137,7 @@ def _calculate_confusion_matrix(
annotation_img,
prediction,
prediction_img,
iou_threshold,
iou_threshold=None,
) -> Tuple[np.ndarray, Set[int]]:
"""This calculates a confusion matrix with ground_truth_index X predicted_index summary

Expand Down Expand Up @@ -166,17 +168,19 @@ def _calculate_confusion_matrix(
confusion = self._filter_confusion_matrix(
confusion, annotation, prediction
)
confusion = non_max_suppress_confusion(confusion, iou_threshold)
false_positive = Segment(FALSE_POSITIVES, index=confusion.shape[0] - 1)
if annotation.annotations[-1].label != FALSE_POSITIVES:
annotation.annotations.append(false_positive)
if annotation.annotations is not prediction.annotations:
# Probably likely that this structure is re-used -> check if same list instance and only append once
# TODO(gunnar): Should this uniqueness be handled by the base class?
prediction.annotations.append(false_positive)
if iou_threshold is not None:
confusion = non_max_suppress_confusion(confusion, iou_threshold)
false_positive = Segment(
FALSE_POSITIVES, index=confusion.shape[0] - 1
)
if annotation.annotations[-1].label != FALSE_POSITIVES:
annotation.annotations.append(false_positive)
if annotation.annotations is not prediction.annotations:
# Probably likely that this structure is re-used -> check if same list instance and only append once
# TODO(gunnar): Should this uniqueness be handled by the base class?
prediction.annotations.append(false_positive)

# TODO(gunnar): Detect non_taxonomy classes for segmentation as well as instance segmentation
non_taxonomy_classes = set()
if self._is_instance_segmentation(annotation, prediction):
(
confusion,
Expand All @@ -198,6 +202,12 @@ def _calculate_confusion_matrix(
for segment in annotation.annotations
if segment.label in missing_or_filtered_labels
}
missing_indexes = (
set(range(confusion.shape[0]))
- set(a.index for a in annotation.annotations)
- set(a.index for a in prediction.annotations)
)
non_taxonomy_classes.update(missing_indexes)

return confusion, non_taxonomy_classes

Expand Down Expand Up @@ -246,7 +256,6 @@ def __init__(
prediction_filters: Optional[
Union[ListOfOrAndFilters, ListOfAndFilters]
] = None,
iou_threshold: float = 0.5,
):
"""Initializes PolygonIOU object.

Expand All @@ -273,7 +282,6 @@ def __init__(
super().__init__(
annotation_filters,
prediction_filters,
iou_threshold,
)

def _metric_impl(
Expand All @@ -288,18 +296,14 @@ def _metric_impl(
annotation_img,
prediction,
prediction_img,
self.iou_threshold,
)

with np.errstate(divide="ignore", invalid="ignore"):
tp = confusion[:-1, :-1]
fp = confusion[:, -1]
iou = np.diag(tp) / (
tp.sum(axis=1) + tp.sum(axis=0) + fp.sum() - np.diag(tp)
iou = np.diag(confusion) / (
confusion.sum(axis=1)
+ confusion.sum(axis=0)
- np.diag(confusion)
)
non_taxonomy_classes = non_taxonomy_classes - {
confusion.shape[1] - 1
}
iou.put(list(non_taxonomy_classes), np.nan)
mean_iou = np.nanmean(iou)
return ScalarResult(value=mean_iou, weight=annotation_img.size) # type: ignore
Expand Down
1 change: 0 additions & 1 deletion nucleus/metrics/segmentation_to_poly_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -252,7 +252,6 @@ def configure_metric(self):
metric = SegmentationIOU(
self.annotation_filters,
self.prediction_filters,
self.iou_threshold,
)
else:
metric = PolygonIOU(
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ exclude = '''

[tool.poetry]
name = "scale-nucleus"
version = "0.14.12"
version = "0.14.13"
description = "The official Python client library for Nucleus, the Data Platform for AI"
license = "MIT"
authors = ["Scale AI Nucleus Team <[email protected]>"]
Expand Down
2 changes: 1 addition & 1 deletion tests/metrics/test_segmentation.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ def compose_input_variables(setup: SegmentationTestSetup):
def test_segmentation_iou(setup):
annotation, prediction, url_to_img = compose_input_variables(setup)

metric = SegmentationIOU(iou_threshold=setup.iou_threshold)
metric = SegmentationIOU()
metric.loader = InMemoryLoader(url_to_img)
result = metric(
AnnotationList(segmentation_annotations=[annotation]),
Expand Down