MAINT Parameters validation for sklearn.metrics.f1_score #25557

Merged: 9 commits, Feb 8, 2023

22 changes: 20 additions & 2 deletions sklearn/metrics/_classification.py
@@ -23,6 +23,7 @@
# License: BSD 3 clause


+from numbers import Integral, Real
import warnings
import numpy as np

@@ -40,7 +41,7 @@
from ..utils.multiclass import type_of_target
from ..utils.validation import _num_samples
from ..utils.sparsefuncs import count_nonzero
-from ..utils._param_validation import StrOptions, validate_params
+from ..utils._param_validation import StrOptions, Options, validate_params
from ..exceptions import UndefinedMetricWarning

from ._base import _check_pos_label_consistency
@@ -1038,6 +1039,23 @@ def zero_one_loss(y_true, y_pred, *, normalize=True, sample_weight=None):
return n_samples - score


+@validate_params(
+    {
+        "y_true": ["array-like", "sparse matrix"],
+        "y_pred": ["array-like", "sparse matrix"],
+        "labels": ["array-like", None],
+        "pos_label": [Real, str, "boolean", None],
+        "average": [
+            StrOptions({"micro", "macro", "samples", "weighted", "binary"}),
+            None,
+        ],
+        "sample_weight": ["array-like", None],
+        "zero_division": [
+            Options(Integral, {0, 1}),
+            StrOptions({"warn"}),
+        ],
+    }
+)
def f1_score(
    y_true,
    y_pred,
@@ -1083,7 +1101,7 @@ def f1_score(
.. versionchanged:: 0.17
Parameter `labels` improved for multiclass problem.

-    pos_label : int, float, bool or str, default=1
+    pos_label : int, float, bool, str or None, default=1
The class to report if ``average='binary'`` and the data is binary.
If the data are multiclass or multilabel, this will be ignored;
setting ``labels=[pos_label]`` and ``average != 'binary'`` will report
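
Not part of the PR page itself, just a sketch of what the new decorator means for callers: once @validate_params wraps f1_score, an argument outside the declared constraints is rejected up front rather than failing somewhere inside the metric computation. The exact exception class and message wording below are assumptions (scikit-learn's validation machinery raises a ValueError subclass that names the offending parameter); treat this as illustrative, not a quote from the library.

# Illustrative sketch (not from the PR). Assumes the validation error is a
# ValueError subclass whose message names the offending parameter.
from sklearn.metrics import f1_score

y_true = [0, 1, 1, 0]
y_pred = [0, 1, 0, 0]

# Valid call: "macro" is one of the allowed options for `average`.
print(f1_score(y_true, y_pred, average="macro"))

# Invalid call: "maybe" is not in {"micro", "macro", "samples", "weighted",
# "binary"} and is not None, so validation fails before any computation runs.
try:
    f1_score(y_true, y_pred, average="maybe")
except ValueError as exc:
    print(exc)  # message should point at the 'average' parameter
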
1 change: 1 addition & 0 deletions sklearn/tests/test_public_functions.py
@@ -120,6 +120,7 @@ def _check_function_param_validation(
"sklearn.metrics.confusion_matrix",
"sklearn.metrics.d2_pinball_score",
"sklearn.metrics.det_curve",
"sklearn.metrics.f1_score",
"sklearn.metrics.hamming_loss",
"sklearn.metrics.mean_absolute_error",
"sklearn.metrics.mean_squared_error",
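
The one-line test change above registers sklearn.metrics.f1_score with the common parameter-validation test. As a rough, hand-rolled analogue (an assumption about the mechanism, not a copy of the real _check_function_param_validation, which systematically perturbs every declared parameter), a single such check could look like this:

# Hand-rolled analogue of one parameter-validation check (assumption: the real
# common test covers every declared parameter, not just `average`).
import pytest
from sklearn.metrics import f1_score

def test_f1_score_rejects_unknown_average():
    with pytest.raises(ValueError, match="average"):
        f1_score([0, 1, 1], [0, 1, 0], average="not-an-option")
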