From 063c74328fcd813a4fa01f87447d16dcd6b4a37b Mon Sep 17 00:00:00 2001
From: Randy Olson
Date: Sat, 19 Aug 2017 12:57:52 -0400
Subject: [PATCH] Tiny docs fix for roc_auc_score

The `roc_auc_score` docs list `average_precision_score` as "Area under the
precision-recall curve". This is not correct, so I fixed it to use the actual
description of `average_precision_score`.
---
 sklearn/metrics/ranking.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sklearn/metrics/ranking.py b/sklearn/metrics/ranking.py
index 9755732a4f910..4f551d55e87b1 100644
--- a/sklearn/metrics/ranking.py
+++ b/sklearn/metrics/ranking.py
@@ -237,7 +237,7 @@ def roc_auc_score(y_true, y_score, average="macro", sample_weight=None):
     See also
     --------
-    average_precision_score : Area under the precision-recall curve
+    average_precision_score : Average precision (AP) from prediction scores
    roc_curve : Compute Receiver operating characteristic (ROC)
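
Note (not part of the patch): a minimal sketch of why the old wording was
misleading. Average precision is a step-wise weighted sum of precisions at
each recall increment, which generally differs from the trapezoidal area
under the precision-recall curve (e.g. `auc(recall, precision)`). The toy
labels and scores below are illustrative only.

```python
import numpy as np
from sklearn.metrics import (average_precision_score, auc,
                             precision_recall_curve)

y_true = np.array([0, 0, 1, 1])
y_score = np.array([0.1, 0.4, 0.35, 0.8])

# Average precision: sum over thresholds of (recall step) * precision.
ap = average_precision_score(y_true, y_score)

# Trapezoidal area under the PR curve: linearly interpolates between
# operating points, which tends to be optimistic for PR curves.
precision, recall, _ = precision_recall_curve(y_true, y_score)
pr_auc = auc(recall, precision)

print("average_precision_score:", ap)  # ~0.83
print("trapezoidal PR AUC:", pr_auc)   # ~0.79 -- a different quantity
```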