
[MRG] Make files private for sklearn.metrics and sklearn.metrics.cluster #15306


Merged
merged 11 commits on Oct 23, 2019
12 changes: 12 additions & 0 deletions .gitignore
@@ -130,6 +130,18 @@ sklearn/svm/libsvm.py
sklearn/svm/libsvm_sparse.py
sklearn/svm/liblinear.py

+sklearn/metrics/cluster/bicluster.py
+sklearn/metrics/cluster/supervised.py
+sklearn/metrics/cluster/unsupervised.py
+sklearn/metrics/cluster/expected_mutual_info_fast.py
+
+sklearn/metrics/base.py
+sklearn/metrics/classification.py
+sklearn/metrics/regression.py
+sklearn/metrics/ranking.py
+sklearn/metrics/pairwise_fast.py
+sklearn/metrics/scorer.py
+
sklearn/inspection/partial_dependence.py
sklearn/inspection/permutation_importance.py

20 changes: 20 additions & 0 deletions sklearn/_build_utils/deprecated_modules.py
@@ -86,6 +86,26 @@
'set_verbosity_wrap'),
('_liblinear', 'sklearn.svm.liblinear', 'sklearn.svm', 'train_wrap'),

+('_bicluster', 'sklearn.metrics.cluster.bicluster',
+ 'sklearn.metrics.cluster', 'consensus_score'),
+('_supervised', 'sklearn.metrics.cluster.supervised',
+ 'sklearn.metrics.cluster', 'entropy'),
+('_unsupervised', 'sklearn.metrics.cluster.unsupervised',
+ 'sklearn.metrics.cluster', 'silhouette_score'),
+('_expected_mutual_info_fast',
+ 'sklearn.metrics.cluster.expected_mutual_info_fast',
+ 'sklearn.metrics.cluster', 'expected_mutual_information'),
+
+('_base', 'sklearn.metrics.base', 'sklearn.metrics', 'combinations'),
+('_classification', 'sklearn.metrics.classification', 'sklearn.metrics',
+ 'accuracy_score'),
+('_regression', 'sklearn.metrics.regression', 'sklearn.metrics',
+ 'max_error'),
+('_ranking', 'sklearn.metrics.ranking', 'sklearn.metrics', 'roc_curve'),
+('_pairwise_fast', 'sklearn.metrics.pairwise_fast', 'sklearn.metrics',
+ 'np'),
+('_scorer', 'sklearn.metrics.scorer', 'sklearn.metrics', 'get_scorer'),
+
('_partial_dependence', 'sklearn.inspection.partial_dependence',
'sklearn.inspection', 'partial_dependence'),
('_permutation_importance', 'sklearn.inspection.permutation_importance',
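Each new entry maps a private module name to its old public import path, with the last element naming an attribute that is apparently used to sanity-check the generated alias. At build time scikit-learn writes a wrapper file at each deprecated path so old imports keep working through the deprecation window. A minimal sketch of what such a generated wrapper might look like (illustrative only, not the verbatim template):

```python
# Hypothetical wrapper generated at sklearn/metrics/classification.py.
# It re-exports everything from the private module and warns on import, so
# `from sklearn.metrics.classification import accuracy_score` still works.
import warnings

from ._classification import *  # noqa: F401,F403

warnings.warn(
    "sklearn.metrics.classification is deprecated and will be removed; "
    "import from sklearn.metrics instead.",
    FutureWarning,
)
```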
2 changes: 1 addition & 1 deletion sklearn/base.py
@@ -404,7 +404,7 @@ def score(self, X, y, sample_weight=None):
"""

from .metrics import r2_score
-from .metrics.regression import _check_reg_targets
+from .metrics._regression import _check_reg_targets
y_pred = self.predict(X)
# XXX: Remove the check in 0.23
y_type, _, _, _ = _check_reg_targets(y, y_pred, None)
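For context, the method touched here is RegressorMixin.score, which delegates to the public r2_score; _check_reg_targets is only an internal validation helper, which is why it can move into a private module. Public behavior is unchanged:

```python
# score() still returns the R^2 of the predictions.
import numpy as np
from sklearn.linear_model import LinearRegression

X = np.array([[0.0], [1.0], [2.0]])
y = np.array([0.0, 1.0, 2.0])
print(LinearRegression().fit(X, y).score(X, y))  # 1.0, a perfect fit
```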
2 changes: 1 addition & 1 deletion sklearn/linear_model/ridge.py
@@ -30,7 +30,7 @@
from ..utils.validation import _check_sample_weight
from ..preprocessing import LabelBinarizer
from ..model_selection import GridSearchCV
-from ..metrics.scorer import check_scoring
+from ..metrics import check_scoring
from ..exceptions import ConvergenceWarning
from ..utils.sparsefuncs import mean_variance_axis

86 changes: 43 additions & 43 deletions sklearn/metrics/__init__.py
@@ -4,36 +4,36 @@
"""


-from .ranking import auc
-from .ranking import average_precision_score
-from .ranking import coverage_error
-from .ranking import dcg_score
-from .ranking import label_ranking_average_precision_score
-from .ranking import label_ranking_loss
-from .ranking import ndcg_score
-from .ranking import precision_recall_curve
-from .ranking import roc_auc_score
-from .ranking import roc_curve
+from ._ranking import auc
+from ._ranking import average_precision_score
+from ._ranking import coverage_error
+from ._ranking import dcg_score
+from ._ranking import label_ranking_average_precision_score
+from ._ranking import label_ranking_loss
+from ._ranking import ndcg_score
+from ._ranking import precision_recall_curve
+from ._ranking import roc_auc_score
+from ._ranking import roc_curve

-from .classification import accuracy_score
-from .classification import balanced_accuracy_score
-from .classification import classification_report
-from .classification import cohen_kappa_score
-from .classification import confusion_matrix
-from .classification import f1_score
-from .classification import fbeta_score
-from .classification import hamming_loss
-from .classification import hinge_loss
-from .classification import jaccard_similarity_score
-from .classification import jaccard_score
-from .classification import log_loss
-from .classification import matthews_corrcoef
-from .classification import precision_recall_fscore_support
-from .classification import precision_score
-from .classification import recall_score
-from .classification import zero_one_loss
-from .classification import brier_score_loss
-from .classification import multilabel_confusion_matrix
+from ._classification import accuracy_score
+from ._classification import balanced_accuracy_score
+from ._classification import classification_report
+from ._classification import cohen_kappa_score
+from ._classification import confusion_matrix
+from ._classification import f1_score
+from ._classification import fbeta_score
+from ._classification import hamming_loss
+from ._classification import hinge_loss
+from ._classification import jaccard_similarity_score
+from ._classification import jaccard_score
+from ._classification import log_loss
+from ._classification import matthews_corrcoef
+from ._classification import precision_recall_fscore_support
+from ._classification import precision_score
+from ._classification import recall_score
+from ._classification import zero_one_loss
+from ._classification import brier_score_loss
+from ._classification import multilabel_confusion_matrix

from . import cluster
from .cluster import adjusted_mutual_info_score
@@ -60,22 +60,22 @@
from .pairwise import pairwise_kernels
from .pairwise import pairwise_distances_chunked

-from .regression import explained_variance_score
-from .regression import max_error
-from .regression import mean_absolute_error
-from .regression import mean_squared_error
-from .regression import mean_squared_log_error
-from .regression import median_absolute_error
-from .regression import r2_score
-from .regression import mean_tweedie_deviance
-from .regression import mean_poisson_deviance
-from .regression import mean_gamma_deviance
+from ._regression import explained_variance_score
+from ._regression import max_error
+from ._regression import mean_absolute_error
+from ._regression import mean_squared_error
+from ._regression import mean_squared_log_error
+from ._regression import median_absolute_error
+from ._regression import r2_score
+from ._regression import mean_tweedie_deviance
+from ._regression import mean_poisson_deviance
+from ._regression import mean_gamma_deviance


-from .scorer import check_scoring
-from .scorer import make_scorer
-from .scorer import SCORERS
-from .scorer import get_scorer
+from ._scorer import check_scoring
+from ._scorer import make_scorer
+from ._scorer import SCORERS
+from ._scorer import get_scorer

from ._plot.roc_curve import plot_roc_curve
from ._plot.roc_curve import RocCurveDisplay
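Because __init__.py re-exports every metric from the new private modules, the supported import surface is untouched; only file-level imports of the old names go through the generated deprecation aliases. A quick check, assuming a build of this branch:

```python
# Public imports are unaffected by the renames.
from sklearn.metrics import accuracy_score, make_scorer, roc_curve

print(accuracy_score([0, 1, 1, 0], [0, 1, 0, 0]))  # 0.75

# The old file-level path still resolves via the generated alias module,
# but importing it should now emit a FutureWarning.
import sklearn.metrics.classification
```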
File renamed without changes.
File renamed without changes.
File renamed without changes.
2 changes: 1 addition & 1 deletion sklearn/metrics/ranking.py → sklearn/metrics/_ranking.py
@@ -35,7 +35,7 @@
from ..preprocessing import label_binarize
from ..preprocessing._label import _encode

-from .base import _average_binary_score, _average_multiclass_ovo_score
+from ._base import _average_binary_score, _average_multiclass_ovo_score


def auc(x, y):
File renamed without changes.
2 changes: 1 addition & 1 deletion sklearn/metrics/scorer.py → sklearn/metrics/_scorer.py
@@ -406,7 +406,7 @@ def check_scoring(estimator, scoring=None, allow_none=False):
module = getattr(scoring, '__module__', None)
if hasattr(module, 'startswith') and \
module.startswith('sklearn.metrics.') and \
-not module.startswith('sklearn.metrics.scorer') and \
+not module.startswith('sklearn.metrics._scorer') and \
not module.startswith('sklearn.metrics.tests.'):
raise ValueError('scoring value %r looks like it is a metric '
'function rather than a scorer. A scorer should '
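The startswith checks above exist because metric functions and scorers have different call signatures: a metric takes (y_true, y_pred), while a scorer takes (estimator, X, y). Any callable whose __module__ lives under sklearn.metrics. but outside the scorer module and the tests is assumed to be a metric passed by mistake, hence the rename to sklearn.metrics._scorer here. A small illustration:

```python
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score, check_scoring, make_scorer

est = LogisticRegression()

# A scorer wraps the metric and knows how to call the estimator.
scorer = check_scoring(est, scoring=make_scorer(accuracy_score))

# Passing the bare metric would trip the __module__ check, since
# accuracy_score.__module__ is now 'sklearn.metrics._classification':
# check_scoring(est, scoring=accuracy_score)  # raises ValueError
```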
36 changes: 18 additions & 18 deletions sklearn/metrics/cluster/__init__.py
@@ -5,24 +5,24 @@
- supervised, which uses ground truth class values for each sample.
- unsupervised, which does not and measures the 'quality' of the model itself.
"""
-from .supervised import adjusted_mutual_info_score
-from .supervised import normalized_mutual_info_score
-from .supervised import adjusted_rand_score
-from .supervised import completeness_score
-from .supervised import contingency_matrix
-from .supervised import expected_mutual_information
-from .supervised import homogeneity_completeness_v_measure
-from .supervised import homogeneity_score
-from .supervised import mutual_info_score
-from .supervised import v_measure_score
-from .supervised import fowlkes_mallows_score
-from .supervised import entropy
-from .unsupervised import silhouette_samples
-from .unsupervised import silhouette_score
-from .unsupervised import calinski_harabasz_score
-from .unsupervised import calinski_harabaz_score
-from .unsupervised import davies_bouldin_score
-from .bicluster import consensus_score
+from ._supervised import adjusted_mutual_info_score
+from ._supervised import normalized_mutual_info_score
+from ._supervised import adjusted_rand_score
+from ._supervised import completeness_score
+from ._supervised import contingency_matrix
+from ._supervised import expected_mutual_information
+from ._supervised import homogeneity_completeness_v_measure
+from ._supervised import homogeneity_score
+from ._supervised import mutual_info_score
+from ._supervised import v_measure_score
+from ._supervised import fowlkes_mallows_score
+from ._supervised import entropy
+from ._unsupervised import silhouette_samples
+from ._unsupervised import silhouette_score
+from ._unsupervised import calinski_harabasz_score
+from ._unsupervised import calinski_harabaz_score
+from ._unsupervised import davies_bouldin_score
+from ._bicluster import consensus_score

__all__ = ["adjusted_mutual_info_score", "normalized_mutual_info_score",
"adjusted_rand_score", "completeness_score", "contingency_matrix",
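The same convention now applies to the cluster subpackage: the package namespace is the supported surface and the underscored modules are implementation details. Roughly:

```python
# Supported: import from the package namespace.
from sklearn.metrics.cluster import silhouette_score

# Discouraged: reaching into the private module still resolves to the
# same function, but the underscored path is not a stable API.
from sklearn.metrics.cluster._unsupervised import silhouette_score
```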
sklearn/metrics/cluster/supervised.py → sklearn/metrics/cluster/_supervised.py
@@ -20,7 +20,7 @@
import numpy as np
from scipy import sparse as sp

-from .expected_mutual_info_fast import expected_mutual_information
+from ._expected_mutual_info_fast import expected_mutual_information
from ...utils.validation import check_array, check_consistent_length
from ...utils.fixes import comb, _astype_copy_false

4 changes: 2 additions & 2 deletions sklearn/metrics/cluster/setup.py
@@ -9,8 +9,8 @@ def configuration(parent_package="", top_path=None):
libraries = []
if os.name == 'posix':
libraries.append('m')
config.add_extension("expected_mutual_info_fast",
sources=["expected_mutual_info_fast.pyx"],
config.add_extension("_expected_mutual_info_fast",
sources=["_expected_mutual_info_fast.pyx"],
include_dirs=[numpy.get_include()],
libraries=libraries)

2 changes: 1 addition & 1 deletion sklearn/metrics/cluster/tests/test_bicluster.py
@@ -4,7 +4,7 @@

from sklearn.utils.testing import assert_almost_equal

-from sklearn.metrics.cluster.bicluster import _jaccard
+from sklearn.metrics.cluster._bicluster import _jaccard
from sklearn.metrics import consensus_score


2 changes: 1 addition & 1 deletion sklearn/metrics/cluster/tests/test_supervised.py
@@ -13,7 +13,7 @@
from sklearn.metrics.cluster import mutual_info_score
from sklearn.metrics.cluster import normalized_mutual_info_score
from sklearn.metrics.cluster import v_measure_score
-from sklearn.metrics.cluster.supervised import _generalized_average
+from sklearn.metrics.cluster._supervised import _generalized_average

from sklearn.utils import assert_all_finite
from sklearn.utils.testing import (
2 changes: 1 addition & 1 deletion sklearn/metrics/pairwise.py
@@ -29,7 +29,7 @@
from ..preprocessing import normalize
from ..utils._mask import _get_mask

-from .pairwise_fast import _chi2_kernel_fast, _sparse_manhattan
+from ._pairwise_fast import _chi2_kernel_fast, _sparse_manhattan
from ..exceptions import DataConversionWarning


4 changes: 2 additions & 2 deletions sklearn/metrics/setup.py
@@ -14,8 +14,8 @@ def configuration(parent_package="", top_path=None):
config.add_subpackage('_plot.tests')
config.add_subpackage('cluster')

config.add_extension("pairwise_fast",
sources=["pairwise_fast.pyx"],
config.add_extension("_pairwise_fast",
sources=["_pairwise_fast.pyx"],
libraries=libraries)

config.add_subpackage('tests')
2 changes: 1 addition & 1 deletion sklearn/metrics/tests/test_classification.py
@@ -45,7 +45,7 @@
from sklearn.metrics import brier_score_loss
from sklearn.metrics import multilabel_confusion_matrix

-from sklearn.metrics.classification import _check_targets
+from sklearn.metrics._classification import _check_targets
from sklearn.exceptions import UndefinedMetricWarning

from scipy.spatial.distance import hamming as sp_hamming
2 changes: 1 addition & 1 deletion sklearn/metrics/tests/test_common.py
@@ -57,7 +57,7 @@
from sklearn.metrics import ndcg_score
from sklearn.metrics import dcg_score

-from sklearn.metrics.base import _average_binary_score
+from sklearn.metrics._base import _average_binary_score


# Note toward developers about metric testing
4 changes: 2 additions & 2 deletions sklearn/metrics/tests/test_ranking.py
@@ -26,8 +26,8 @@
from sklearn.metrics import label_ranking_loss
from sklearn.metrics import roc_auc_score
from sklearn.metrics import roc_curve
-from sklearn.metrics.ranking import _ndcg_sample_scores, _dcg_sample_scores
-from sklearn.metrics.ranking import ndcg_score, dcg_score
+from sklearn.metrics._ranking import _ndcg_sample_scores, _dcg_sample_scores
+from sklearn.metrics import ndcg_score, dcg_score

from sklearn.exceptions import UndefinedMetricWarning

2 changes: 1 addition & 1 deletion sklearn/metrics/tests/test_regression.py
@@ -17,7 +17,7 @@
from sklearn.metrics import r2_score
from sklearn.metrics import mean_tweedie_deviance

-from sklearn.metrics.regression import _check_reg_targets
+from sklearn.metrics._regression import _check_reg_targets

from ...exceptions import UndefinedMetricWarning

7 changes: 4 additions & 3 deletions sklearn/metrics/tests/test_score_objects.py
@@ -19,10 +19,11 @@
log_loss, precision_score, recall_score,
jaccard_score)
from sklearn.metrics import cluster as cluster_module
-from sklearn.metrics.scorer import (check_scoring, _PredictScorer,
-                                    _passthrough_scorer, _MultimetricScorer)
+from sklearn.metrics import check_scoring
+from sklearn.metrics._scorer import (_PredictScorer, _passthrough_scorer,
+                                     _MultimetricScorer,
+                                     _check_multimetric_scoring)
from sklearn.metrics import accuracy_score
-from sklearn.metrics.scorer import _check_multimetric_scoring
from sklearn.metrics import make_scorer, get_scorer, SCORERS
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import LinearSVC
4 changes: 2 additions & 2 deletions sklearn/model_selection/_search.py
@@ -35,8 +35,8 @@
from ..utils.random import sample_without_replacement
from ..utils.validation import indexable, check_is_fitted
from ..utils.metaestimators import if_delegate_has_method
-from ..metrics.scorer import _check_multimetric_scoring
-from ..metrics.scorer import check_scoring
+from ..metrics._scorer import _check_multimetric_scoring
+from ..metrics import check_scoring


__all__ = ['GridSearchCV', 'ParameterGrid', 'fit_grid_point',
4 changes: 2 additions & 2 deletions sklearn/model_selection/_validation.py
@@ -25,8 +25,8 @@
_message_with_time)
from ..utils.validation import _is_arraylike, _num_samples
from ..utils.metaestimators import _safe_split
-from ..metrics.scorer import (check_scoring, _check_multimetric_scoring,
-                              _MultimetricScorer)
+from ..metrics import check_scoring
+from ..metrics._scorer import _check_multimetric_scoring, _MultimetricScorer
from ..exceptions import FitFailedWarning
from ._split import check_cv
from ..preprocessing import LabelEncoder