Commit
[MAINTENANCE] dry up validation status calc (#8962)
1 parent 4ded445 · commit a25fe64
Showing 5 changed files with 121 additions and 141 deletions.
New file (@@ -0,0 +1,42 @@) — great_expectations/validator/validation_statistics.py, per the import path used in the tests below:

from __future__ import annotations

from typing import TYPE_CHECKING, NamedTuple

if TYPE_CHECKING:
    from great_expectations.core.expectation_validation_result import (
        ExpectationValidationResult,
    )


class ValidationStatistics(NamedTuple):
    evaluated_expectations: int
    successful_expectations: int
    unsuccessful_expectations: int
    success_percent: float | None
    success: bool


def calc_validation_statistics(
    validation_results: list[ExpectationValidationResult],
) -> ValidationStatistics:
    """
    Calculate summary statistics for the validation results and
    return ``ValidationStatistics``.
    """
    # calc stats
    evaluated_expectations = len(validation_results)
    successful_expectations = len([exp for exp in validation_results if exp.success])
    unsuccessful_expectations = evaluated_expectations - successful_expectations
    success = successful_expectations == evaluated_expectations
    try:
        success_percent = successful_expectations / evaluated_expectations * 100
    except ZeroDivisionError:
        success_percent = None

    return ValidationStatistics(
        successful_expectations=successful_expectations,
        evaluated_expectations=evaluated_expectations,
        unsuccessful_expectations=unsuccessful_expectations,
        success=success,
        success_percent=success_percent,
    )
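For orientation, a minimal usage sketch of the new helper; the sample ExpectationValidationResult values are illustrative only and not part of this commit.

from great_expectations.core.expectation_validation_result import (
    ExpectationValidationResult,
)
from great_expectations.validator.validation_statistics import (
    calc_validation_statistics,
)

# Illustrative input: two passing expectations and one failing one.
results = [
    ExpectationValidationResult(success=True),
    ExpectationValidationResult(success=True),
    ExpectationValidationResult(success=False),
]

stats = calc_validation_statistics(results)
print(stats.evaluated_expectations)     # 3
print(stats.successful_expectations)    # 2
print(stats.unsuccessful_expectations)  # 1
print(stats.success_percent)            # ~66.67
print(stats.success)                    # False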
New test file (@@ -0,0 +1,70 @@):
import pytest

from great_expectations.core.expectation_validation_result import (
    ExpectationValidationResult,
)
from great_expectations.validator.validation_statistics import (
    ValidationStatistics,
    calc_validation_statistics,
)


@pytest.mark.unit
def test_stats_no_expectations():
    expectation_results = []
    actual = calc_validation_statistics(expectation_results)

    # pay attention to these two
    assert None is actual.success_percent
    assert True is actual.success
    # the rest is boring
    assert 0 == actual.successful_expectations
    assert 0 == actual.evaluated_expectations
    assert 0 == actual.unsuccessful_expectations


@pytest.mark.unit
def test_stats_no_successful_expectations():
    expectation_results = [ExpectationValidationResult(success=False)]
    actual = calc_validation_statistics(expectation_results)
    expected = ValidationStatistics(1, 0, 1, 0.0, False)
    assert expected == actual

    expectation_results = [
        ExpectationValidationResult(success=False),
        ExpectationValidationResult(success=False),
        ExpectationValidationResult(success=False),
    ]
    actual = calc_validation_statistics(expectation_results)
    expected = ValidationStatistics(3, 0, 3, 0.0, False)
    assert expected == actual


@pytest.mark.unit
def test_stats_all_successful_expectations():
    expectation_results = [
        ExpectationValidationResult(success=True),
    ]
    actual = calc_validation_statistics(expectation_results)
    expected = ValidationStatistics(1, 1, 0, 100.0, True)
    assert expected == actual

    expectation_results = [
        ExpectationValidationResult(success=True),
        ExpectationValidationResult(success=True),
        ExpectationValidationResult(success=True),
    ]
    actual = calc_validation_statistics(expectation_results)
    expected = ValidationStatistics(3, 3, 0, 100.0, True)
    assert expected == actual


@pytest.mark.unit
def test_stats_mixed_expectations():
    expectation_results = [
        ExpectationValidationResult(success=False),
        ExpectationValidationResult(success=True),
    ]
    actual = calc_validation_statistics(expectation_results)
    expected = ValidationStatistics(2, 1, 1, 50.0, False)
    assert expected == actual