From 60a5027159a646c1ab8ee3c8608c635d1580a13b Mon Sep 17 00:00:00 2001 From: Vincent M Date: Wed, 3 Aug 2022 14:06:14 +0200 Subject: [PATCH] Fix doc validation error for `v_measure_score` --- sklearn/metrics/cluster/_supervised.py | 20 +++++++++----------- sklearn/tests/test_docstrings.py | 1 - 2 files changed, 9 insertions(+), 12 deletions(-) diff --git a/sklearn/metrics/cluster/_supervised.py b/sklearn/metrics/cluster/_supervised.py index 2f2f55fcd2156..8ffd4083eef37 100644 --- a/sklearn/metrics/cluster/_supervised.py +++ b/sklearn/metrics/cluster/_supervised.py @@ -642,16 +642,15 @@ def v_measure_score(labels_true, labels_pred, *, beta=1.0): measure the agreement of two independent label assignments strategies on the same dataset when the real ground truth is not known. - Read more in the :ref:`User Guide <homogeneity_completeness>`. Parameters ---------- labels_true : int array, shape = [n_samples] - ground truth class labels to be used as a reference + Ground truth class labels to be used as a reference. labels_pred : array-like of shape (n_samples,) - cluster labels to evaluate + Cluster labels to evaluate. beta : float, default=1.0 Ratio of weight attributed to ``homogeneity`` vs ``completeness``. @@ -662,7 +661,13 @@ def v_measure_score(labels_true, labels_pred, *, beta=1.0): Returns ------- v_measure : float - score between 0.0 and 1.0. 1.0 stands for perfectly complete labeling + Score between 0.0 and 1.0. 1.0 stands for perfectly complete labeling. + + See Also + -------- + homogeneity_score : Homogeneity metric of cluster labeling. + completeness_score : Completeness metric of cluster labeling. + normalized_mutual_info_score : Normalized Mutual Information. 
References ---------- @@ -671,15 +676,8 @@ def v_measure_score(labels_true, labels_pred, *, beta=1.0): conditional entropy-based external cluster evaluation measure <https://aclanthology.org/D07-1043>`_ - See Also - -------- - homogeneity_score - completeness_score - normalized_mutual_info_score - Examples -------- - Perfect labelings are both homogeneous and complete, hence have score 1.0:: >>> from sklearn.metrics.cluster import v_measure_score diff --git a/sklearn/tests/test_docstrings.py b/sklearn/tests/test_docstrings.py index da45dbb9eca8e..3996e10372f57 100644 --- a/sklearn/tests/test_docstrings.py +++ b/sklearn/tests/test_docstrings.py @@ -43,7 +43,6 @@ "sklearn.metrics.cluster._supervised.normalized_mutual_info_score", "sklearn.metrics.cluster._supervised.pair_confusion_matrix", "sklearn.metrics.cluster._supervised.rand_score", - "sklearn.metrics.cluster._supervised.v_measure_score", "sklearn.metrics.pairwise.pairwise_distances_chunked", "sklearn.preprocessing._data.maxabs_scale", "sklearn.preprocessing._data.scale",