Skip to content

Commit

Permalink
Add test for invalid average values
Browse files Browse the repository at this point in the history
  • Loading branch information
fluegelk committed Nov 26, 2024
1 parent bd4ccb0 commit e4d3463
Showing 1 changed file with 16 additions and 0 deletions.
16 changes: 16 additions & 0 deletions tests/test_evaluation_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -380,6 +380,22 @@ def test_precision_recall_fscore__imbalanced(self, n_classes: int) -> None:
assert actual == pytest.approx(expected_manual, 1e-6)
assert actual == pytest.approx(expected_sklearn, 1e-6)

def test_precision_recall_fscore__invalid_average(self, n_classes: int) -> None:
    """
    Verify that precision_recall_fscore rejects unsupported ``average`` arguments.

    Each invalid value (a bogus string, ints, a bool, and a float) must make
    the metric raise a ValueError rather than silently compute something.

    Parameters
    ----------
    n_classes : int
        The number of classes in the dataset generated for testing the metric.
    """
    # Perfect predictions: the labels themselves serve as both truth and prediction,
    # giving a valid square confusion matrix so only ``average`` can be at fault.
    labels = np.arange(n_classes).repeat(5)
    cm = sklearn.metrics.confusion_matrix(labels, labels)
    for bad_average in ("invalid_average", 1, True, 0.01234):
        with pytest.raises(ValueError):
            evaluation_metrics.precision_recall_fscore(cm, average=bad_average)

def test_precision_score(self, n_classes: int) -> None:
"""
Test the precision score metric for a variable number of classes in both balanced and unbalanced cases.
Expand Down

0 comments on commit e4d3463

Please sign in to comment.