diff --git a/sklearn/metrics/_ranking.py b/sklearn/metrics/_ranking.py
index c506953076bef..7f64f479ed275 100644
--- a/sklearn/metrics/_ranking.py
+++ b/sklearn/metrics/_ranking.py
@@ -372,7 +372,7 @@ def roc_auc_score(
     multi_class="raise",
     labels=None,
 ):
-    """Compute Area Under the Receiver Operating Characteristic Curve (ROC AUC)
+    """Compute Area Under the Receiver Operating Characteristic Curve (ROC AUC) \
     from prediction scores.

     Note: this implementation can be used with binary, multiclass and
@@ -471,6 +471,16 @@ class scores must correspond to the order of ``labels``,
     Returns
     -------
     auc : float
+        Area Under the Curve score.
+
+    See Also
+    --------
+    average_precision_score : Area under the precision-recall curve.
+    roc_curve : Compute Receiver operating characteristic (ROC) curve.
+    RocCurveDisplay.from_estimator : Plot Receiver Operating Characteristic
+        (ROC) curve given an estimator and some data.
+    RocCurveDisplay.from_predictions : Plot Receiver Operating Characteristic
+        (ROC) curve given the true and predicted values.

     References
     ----------
@@ -493,15 +503,6 @@ class scores must correspond to the order of ``labels``,
            Machine Learning, 45(2), 171-186.
            `_

-    See Also
-    --------
-    average_precision_score : Area under the precision-recall curve.
-    roc_curve : Compute Receiver operating characteristic (ROC) curve.
-    RocCurveDisplay.from_estimator : Plot Receiver Operating Characteristic
-        (ROC) curve given an estimator and some data.
-    RocCurveDisplay.from_predictions : Plot Receiver Operating Characteristic
-        (ROC) curve given the true and predicted values.
-
     Examples
     --------
     Binary case:
diff --git a/sklearn/tests/test_docstrings.py b/sklearn/tests/test_docstrings.py
index 72f98765a5eb0..907d6fbcf96b3 100644
--- a/sklearn/tests/test_docstrings.py
+++ b/sklearn/tests/test_docstrings.py
@@ -40,7 +40,6 @@
     "sklearn.metrics._plot.precision_recall_curve.plot_precision_recall_curve",
     "sklearn.metrics._ranking.coverage_error",
     "sklearn.metrics._ranking.dcg_score",
-    "sklearn.metrics._ranking.roc_auc_score",
     "sklearn.metrics._ranking.roc_curve",
     "sklearn.metrics._ranking.top_k_accuracy_score",
     "sklearn.metrics._regression.mean_pinball_loss",